| author | robot-piglet <[email protected]> | 2024-12-15 15:50:26 +0300 |
|---|---|---|
| committer | robot-piglet <[email protected]> | 2024-12-15 16:03:15 +0300 |
| commit | 56f00a2ec101e22b68e264dcd73855552f92d454 (patch) | |
| tree | 9d0a365064381edb7b257a3ad2bbb6ade73f665c /contrib/python | |
| parent | d6493c77ed0f15c94153dc50016fc6281493c205 (diff) | |
Intermediate changes
commit_hash:2740d96723f570c63ae060b85601b0d1ff88f8a4
Diffstat (limited to 'contrib/python')
| -rw-r--r-- | contrib/python/asttokens/.dist-info/METADATA | 26 |
| -rw-r--r-- | contrib/python/asttokens/README.rst | 4 |
| -rw-r--r-- | contrib/python/asttokens/asttokens/asttokens.py | 51 |
| -rw-r--r-- | contrib/python/asttokens/asttokens/line_numbers.py | 2 |
| -rw-r--r-- | contrib/python/asttokens/asttokens/mark_tokens.py | 70 |
| -rw-r--r-- | contrib/python/asttokens/asttokens/util.py | 223 |
| -rw-r--r-- | contrib/python/asttokens/asttokens/version.py | 2 |
| -rw-r--r-- | contrib/python/asttokens/ya.make | 6 |
8 files changed, 158 insertions(+), 226 deletions(-)
diff --git a/contrib/python/asttokens/.dist-info/METADATA b/contrib/python/asttokens/.dist-info/METADATA
index 233cd9355af..2139497f4da 100644
--- a/contrib/python/asttokens/.dist-info/METADATA
+++ b/contrib/python/asttokens/.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: asttokens
-Version: 2.4.1
+Version: 3.0.0
 Summary: Annotate AST trees with source code positions
 Home-page: https://github.com/gristlabs/asttokens
 Author: Dmitry Sagalovskiy, Grist Labs
@@ -16,29 +16,23 @@ Classifier: Topic :: Software Development :: Interpreters
 Classifier: Topic :: Software Development :: Pre-processors
 Classifier: Environment :: Console
 Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.8
 License-File: LICENSE
-Requires-Dist: six >=1.12.0
-Requires-Dist: typing ; python_version < "3.5"
 Provides-Extra: astroid
-Requires-Dist: astroid <2,>=1 ; (python_version < "3") and extra == 'astroid'
-Requires-Dist: astroid <4,>=2 ; (python_version >= "3") and extra == 'astroid'
+Requires-Dist: astroid<4,>=2; extra == "astroid"
 Provides-Extra: test
-Requires-Dist: pytest ; extra == 'test'
-Requires-Dist: astroid <2,>=1 ; (python_version < "3") and extra == 'test'
-Requires-Dist: astroid <4,>=2 ; (python_version >= "3") and extra == 'test'
+Requires-Dist: astroid<4,>=2; extra == "test"
+Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
+Requires-Dist: pytest-xdist; extra == "test"
 
 ASTTokens
 =========
@@ -74,7 +68,6 @@ The API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.h
 
 Usage
 -----
-ASTTokens works with both Python2 and Python3.
 
 ASTTokens can annotate both trees built by `ast <https://docs.python.org/2/library/ast.html>`_,
 AND those built by `astroid <https://github.com/PyCQA/astroid>`_.
@@ -121,4 +114,5 @@ To contribute:
 3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``.
 4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel.
-5. By default certain tests which take a very long time to run are skipped, but they are run on travis CI. To run them locally, set the environment variable ``ASTTOKENS_SLOW_TESTS``. For example run ``ASTTOKENS_SLOW_TESTS=1 tox`` to run the full suite of tests.
+5. By default certain tests which take a very long time to run are skipped, but they are run in CI.
+   These are marked using the ``pytest`` marker ``slow`` and can be run on their own with ``pytest -m slow`` or as part of the full suite with ``pytest -m ''``.
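The metadata changes above are the headline of this release: asttokens 3.0.0 drops Python 2 and the ``six``/``typing`` shims, requires Python >= 3.8, and collapses the conditional ``astroid`` requirements into one unconditional spec. A quick way to see what this means for consumers is to read the installed metadata back with the standard library; a minimal sketch (assuming asttokens 3.0.0 is installed in the current environment)::

    from importlib.metadata import metadata, requires

    md = metadata("asttokens")
    print(md["Requires-Python"])   # ">=3.8" -- Python 2 and 3.5-3.7 are gone
    # The core package now has no unconditional runtime dependencies;
    # everything left is gated behind an extra.
    for req in requires("asttokens") or []:
        print(req)                 # e.g. astroid<4,>=2; extra == "astroid"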
diff --git a/contrib/python/asttokens/README.rst b/contrib/python/asttokens/README.rst
index edbf579be7f..a0ccfcaf953 100644
--- a/contrib/python/asttokens/README.rst
+++ b/contrib/python/asttokens/README.rst
@@ -32,7 +32,6 @@ The API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.h
 
 Usage
 -----
-ASTTokens works with both Python2 and Python3.
 
 ASTTokens can annotate both trees built by `ast <https://docs.python.org/2/library/ast.html>`_,
 AND those built by `astroid <https://github.com/PyCQA/astroid>`_.
@@ -79,4 +78,5 @@ To contribute:
 3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``.
 4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel.
-5. By default certain tests which take a very long time to run are skipped, but they are run on travis CI. To run them locally, set the environment variable ``ASTTOKENS_SLOW_TESTS``. For example run ``ASTTOKENS_SLOW_TESTS=1 tox`` to run the full suite of tests.
+5. By default certain tests which take a very long time to run are skipped, but they are run in CI.
+   These are marked using the ``pytest`` marker ``slow`` and can be run on their own with ``pytest -m slow`` or as part of the full suite with ``pytest -m ''``.
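For orientation, the public API this README documents is unchanged by the 2.x -> 3.0 transition; only the compatibility layer went away. A minimal usage sketch (variable names are illustrative)::

    import ast
    import asttokens

    source = "xs = [x * 2 for x in ys]\n"
    atok = asttokens.ASTTokens(source, parse=True)

    # With parse=True the constructor already called mark_tokens(), so every
    # node carries .first_token and .last_token.
    for node in ast.walk(atok.tree):
        if isinstance(node, ast.ListComp):
            print(atok.get_text(node))  # [x * 2 for x in ys]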
diff --git a/contrib/python/asttokens/asttokens/asttokens.py b/contrib/python/asttokens/asttokens/asttokens.py
index d2b902da4c4..b537786ef8c 100644
--- a/contrib/python/asttokens/asttokens/asttokens.py
+++ b/contrib/python/asttokens/asttokens/asttokens.py
@@ -20,9 +20,6 @@
 import token
 from ast import Module
 from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast, TYPE_CHECKING
-import six
-from six.moves import xrange  # pylint: disable=redefined-builtin
-
 from .line_numbers import LineNumbers
 from .util import (
   Token, match_token, is_non_coding_token, patched_generate_tokens, last_stmt,
@@ -33,18 +30,14 @@ if TYPE_CHECKING:  # pragma: no cover
   from .util import AstNode, TokenInfo
 
 
-class ASTTextBase(six.with_metaclass(abc.ABCMeta, object)):
-  def __init__(self, source_text, filename):
-    # type: (Any, str) -> None
-    # FIXME: Strictly, the type of source_text is one of the six string types, but hard to specify with mypy given
-    # https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
-
+class ASTTextBase(metaclass=abc.ABCMeta):
+  def __init__(self, source_text: str, filename: str) -> None:
     self._filename = filename
 
     # Decode source after parsing to let Python 2 handle coding declarations.
     # (If the encoding was not utf-8 compatible, then even if it parses correctly,
     # we'll fail with a unicode error here.)
-    source_text = six.ensure_text(source_text)
+    source_text = str(source_text)
 
     self._text = source_text
     self._line_numbers = LineNumbers(source_text)
@@ -89,7 +82,7 @@
     return self._text[start: end]
 
 
-class ASTTokens(ASTTextBase, object):
+class ASTTokens(ASTTextBase):
   """
   ASTTokens maintains the text of Python code in several forms: as a string, as line numbers,
   and as tokens, and is used to mark and access token and position information.
@@ -111,9 +104,6 @@ class ASTTokens(ASTTextBase, object):
   def __init__(self, source_text, parse=False, tree=None, filename='<unknown>', tokens=None):
     # type: (Any, bool, Optional[Module], str, Iterable[TokenInfo]) -> None
-    # FIXME: Strictly, the type of source_text is one of the six string types, but hard to specify with mypy given
-    # https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
-
     super(ASTTokens, self).__init__(source_text, filename)
 
     self._tree = ast.parse(source_text, filename) if parse else tree
@@ -134,7 +124,7 @@ class ASTTokens(ASTTextBase, object):
     """
     Given the root of the AST or Astroid tree produced from source_text, visits all nodes marking
     them with token and position information by adding ``.first_token`` and
-    ``.last_token``attributes. This is done automatically in the constructor when ``parse`` or
+    ``.last_token`` attributes. This is done automatically in the constructor when ``parse`` or
     ``tree`` arguments are set, but may be used manually with a separate AST or Astroid tree.
     """
     # The hard work of this class is done by MarkTokens
@@ -249,7 +239,7 @@ class ASTTokens(ASTTextBase, object):
     Yields all tokens in order from first_token through and including last_token. If
     include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT.
     """
-    for i in xrange(first_token.index, last_token.index + 1):
+    for i in range(first_token.index, last_token.index + 1):
       if include_extra or not is_non_coding_token(self._tokens[i].type):
         yield self._tokens[i]
 
@@ -284,7 +274,7 @@ class ASTTokens(ASTTextBase, object):
     return start, end
 
 
-class ASTText(ASTTextBase, object):
+class ASTText(ASTTextBase):
   """
   Supports the same ``get_text*`` methods as ``ASTTokens``, but uses the AST to determine the
   text positions instead of tokens.
@@ -299,9 +289,6 @@ class ASTText(ASTTextBase, object):
   """
   def __init__(self, source_text, tree=None, filename='<unknown>'):
     # type: (Any, Optional[Module], str) -> None
-    # FIXME: Strictly, the type of source_text is one of the six string types, but hard to specify with mypy given
-    # https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
-
     super(ASTText, self).__init__(source_text, filename)
 
     self._tree = tree
@@ -334,10 +321,6 @@ class ASTText(ASTTextBase, object):
     """
     Version of ``get_text_positions()`` that doesn't use tokens.
     """
-    if sys.version_info[:2] < (3, 8):  # pragma: no cover
-      # This is just for mpypy
-      raise AssertionError("This method should only be called internally after checking supports_tokenless()")
-
     if is_module(node):
       # Modules don't have position info, so just return the range of the whole text.
       # The token-using method does something different, but its behavior seems weird and inconsistent.
@@ -420,16 +403,14 @@ class ASTText(ASTTextBase, object):
     return self.asttokens.get_text_positions(node, padded)
 
 
-# Node types that _get_text_positions_tokenless doesn't support. Only relevant for Python 3.8+.
-_unsupported_tokenless_types = ()  # type: Tuple[str, ...]
-if sys.version_info[:2] >= (3, 8):
-  # no lineno
-  _unsupported_tokenless_types += ("arguments", "Arguments", "withitem")
-  if sys.version_info[:2] == (3, 8):
-    # _get_text_positions_tokenless works incorrectly for these types due to bugs in Python 3.8.
-    _unsupported_tokenless_types += ("arg", "Starred")
-    # no lineno in 3.8
-    _unsupported_tokenless_types += ("Slice", "ExtSlice", "Index", "keyword")
+# Node types that _get_text_positions_tokenless doesn't support.
+# These initial values are missing lineno.
+_unsupported_tokenless_types = ("arguments", "Arguments", "withitem")  # type: Tuple[str, ...]
+if sys.version_info[:2] == (3, 8):
+  # _get_text_positions_tokenless works incorrectly for these types due to bugs in Python 3.8.
+  _unsupported_tokenless_types += ("arg", "Starred")
+  # no lineno in 3.8
+  _unsupported_tokenless_types += ("Slice", "ExtSlice", "Index", "keyword")
 
 
 def supports_tokenless(node=None):
@@ -441,7 +422,6 @@ def supports_tokenless(node=None):
 
   The following cases are not supported:
 
-  - Python 3.7 and earlier
   - PyPy
   - ``ast.arguments`` / ``astroid.Arguments``
   - ``ast.withitem``
@@ -466,6 +446,5 @@ def supports_tokenless(node=None):
       )
     )
   )
-  and sys.version_info[:2] >= (3, 8)
   and 'pypy' not in sys.version.lower()
 )
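With the version gates gone, ``supports_tokenless`` now only rules out PyPy and the node types listed in ``_unsupported_tokenless_types``. A sketch of the tokenless path this enables (illustrative names; assumes CPython)::

    import ast
    from asttokens import ASTText
    from asttokens.asttokens import supports_tokenless

    source = "value = compute(a, b)\n"
    tree = ast.parse(source)
    atext = ASTText(source, tree=tree)

    call = tree.body[0].value
    if supports_tokenless(call):
        # Positions come straight from the AST; no tokenization happens.
        print(atext.get_text(call))  # compute(a, b)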
diff --git a/contrib/python/asttokens/asttokens/line_numbers.py b/contrib/python/asttokens/asttokens/line_numbers.py
index aaf76cef6d0..745b9f8a482 100644
--- a/contrib/python/asttokens/asttokens/line_numbers.py
+++ b/contrib/python/asttokens/asttokens/line_numbers.py
@@ -18,7 +18,7 @@ from typing import Dict, List, Tuple
 _line_start_re = re.compile(r'^', re.M)
 
 
-class LineNumbers(object):
+class LineNumbers:
   """
   Class to convert between character offsets in a text string, and pairs (line, column) of 1-based
   line and 0-based column numbers, as used by tokens and AST nodes.
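The hunk above only touches the class declaration; for context, ``LineNumbers`` converts between character offsets and (1-based line, 0-based column) pairs. A sketch using its ``line_to_offset``/``offset_to_line`` methods (those names are not shown in this diff, so treat them as an assumption about the library's existing API)::

    from asttokens.line_numbers import LineNumbers

    ln = LineNumbers("first line\nsecond line\n")
    offset = ln.line_to_offset(2, 0)   # start of "second line"
    print(offset)                      # 11
    print(ln.offset_to_line(offset))   # (2, 0)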
diff --git a/contrib/python/asttokens/asttokens/mark_tokens.py b/contrib/python/asttokens/asttokens/mark_tokens.py
index f5a8ac41cac..f866b1cbcba 100644
--- a/contrib/python/asttokens/asttokens/mark_tokens.py
+++ b/contrib/python/asttokens/asttokens/mark_tokens.py
@@ -19,11 +19,8 @@ import token
 from ast import Module
 from typing import Callable, List, Union, cast, Optional, Tuple, TYPE_CHECKING
 
-import six
-
 from . import util
 from .asttokens import ASTTokens
-from .util import AstConstant
 from .astroid_compat import astroid_node_classes as nc, BaseContainer as AstroidBaseContainer
 
 if TYPE_CHECKING:
@@ -44,7 +41,7 @@ _matching_pairs_right = {
 }
 
 
-class MarkTokens(object):
+class MarkTokens:
   """
   Helper that visits all nodes in the AST tree and assigns .first_token and .last_token attributes
   to each of them. This is the heart of the token-marking logic.
@@ -179,23 +176,6 @@
     util.expect_token(before, token.OP, open_brace)
     return (before, last_token)
 
-  # Python 3.8 fixed the starting position of list comprehensions:
-  # https://bugs.python.org/issue31241
-  if sys.version_info < (3, 8):
-    def visit_listcomp(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      return self.handle_comp('[', node, first_token, last_token)
-
-  if six.PY2:
-    # We shouldn't do this on PY3 because its SetComp/DictComp already have a correct start.
-    def visit_setcomp(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      return self.handle_comp('{', node, first_token, last_token)
-
-    def visit_dictcomp(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      return self.handle_comp('{', node, first_token, last_token)
-
   def visit_comprehension(self,
                           node,  # type: AstNode
                           first_token,  # type: util.Token
@@ -308,26 +288,19 @@
       last_token = maybe_comma
     return (first_token, last_token)
 
-  if sys.version_info >= (3, 8):
-    # In Python3.8 parsed tuples include parentheses when present.
-    def handle_tuple_nonempty(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      assert isinstance(node, ast.Tuple) or isinstance(node, AstroidBaseContainer)
-      # It's a bare tuple if the first token belongs to the first child. The first child may
-      # include extraneous parentheses (which don't create new nodes), so account for those too.
-      child = node.elts[0]
-      if TYPE_CHECKING:
-        child = cast(AstNode, child)
-      child_first, child_last = self._gobble_parens(child.first_token, child.last_token, True)
-      if first_token == child_first:
-        return self.handle_bare_tuple(node, first_token, last_token)
-      return (first_token, last_token)
-  else:
-    # Before python 3.8, parsed tuples do not include parens.
-    def handle_tuple_nonempty(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      (first_token, last_token) = self.handle_bare_tuple(node, first_token, last_token)
-      return self._gobble_parens(first_token, last_token, False)
+  # In Python3.8 parsed tuples include parentheses when present.
+  def handle_tuple_nonempty(self, node, first_token, last_token):
+    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
+    assert isinstance(node, ast.Tuple) or isinstance(node, AstroidBaseContainer)
+    # It's a bare tuple if the first token belongs to the first child. The first child may
+    # include extraneous parentheses (which don't create new nodes), so account for those too.
+    child = node.elts[0]
+    if TYPE_CHECKING:
+      child = cast(AstNode, child)
+    child_first, child_last = self._gobble_parens(child.first_token, child.last_token, True)
+    if first_token == child_first:
+      return self.handle_bare_tuple(node, first_token, last_token)
+    return (first_token, last_token)
 
   def visit_tuple(self, node, first_token, last_token):
     # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
@@ -429,19 +402,15 @@
     # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
     return self.handle_num(node, cast(ast.Num, node).n, first_token, last_token)
 
-  # In Astroid, the Num and Str nodes are replaced by Const.
   def visit_const(self, node, first_token, last_token):
     # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-    assert isinstance(node, AstConstant) or isinstance(node, nc.Const)
+    assert isinstance(node, ast.Constant) or isinstance(node, nc.Const)
     if isinstance(node.value, numbers.Number):
       return self.handle_num(node, node.value, first_token, last_token)
-    elif isinstance(node.value, (six.text_type, six.binary_type)):
+    elif isinstance(node.value, (str, bytes)):
      return self.visit_str(node, first_token, last_token)
     return (first_token, last_token)
 
-  # In Python >= 3.6, there is a similar class 'Constant' for literals
-  # In 3.8 it became the type produced by ast.parse
-  # https://bugs.python.org/issue32892
   visit_constant = visit_const
 
   def visit_keyword(self, node, first_token, last_token):
@@ -473,13 +442,6 @@
     first_token = last_token = self._code.prev_token(colon)
     return (first_token, last_token)
 
-  if six.PY2:
-    # No need for this on Python3, which already handles 'with' nodes correctly.
-    def visit_with(self, node, first_token, last_token):
-      # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
-      first = self._code.find_token(first_token, token.NAME, 'with', reverse=True)
-      return (first, last_token)
-
   # Async nodes should typically start with the word 'async'
   # but Python < 3.7 doesn't put the col_offset there
   # AsyncFunctionDef is slightly different because it might have
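Since only the Python 3.8+ branch of ``handle_tuple_nonempty`` survives, the marked text of a parenthesized tuple now always includes its parentheses, while a bare tuple's does not. A sketch::

    import ast
    import asttokens

    for source in ("pair = (1, 2)\n", "pair = 1, 2\n"):
        atok = asttokens.ASTTokens(source, parse=True)
        tup = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Tuple))
        print(repr(atok.get_text(tup)))
    # '(1, 2)'
    # '1, 2'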
diff --git a/contrib/python/asttokens/asttokens/util.py b/contrib/python/asttokens/asttokens/util.py
index cbd309309d7..df3e729b2b2 100644
--- a/contrib/python/asttokens/asttokens/util.py
+++ b/contrib/python/asttokens/asttokens/util.py
@@ -20,10 +20,21 @@ import token
 import tokenize
 from abc import ABCMeta
 from ast import Module, expr, AST
-from typing import Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union, cast, Any, TYPE_CHECKING
-
-from six import iteritems
-
+from functools import lru_cache
+from typing import (
+  Callable,
+  Dict,
+  Iterable,
+  Iterator,
+  List,
+  Optional,
+  Tuple,
+  Union,
+  cast,
+  Any,
+  TYPE_CHECKING,
+  Type,
+)
 
 if TYPE_CHECKING:  # pragma: no cover
   from .astroid_compat import NodeNG
@@ -38,10 +49,7 @@ if TYPE_CHECKING:  # pragma: no cover
 
   AstNode = Union[EnhancedAST, NodeNG]
 
-  if sys.version_info[0] == 2:
-    TokenInfo = Tuple[int, str, Tuple[int, int], Tuple[int, int], str]
-  else:
-    TokenInfo = tokenize.TokenInfo
+  TokenInfo = tokenize.TokenInfo
 
 
 def token_repr(tok_type, string):
@@ -70,13 +78,6 @@ class Token(collections.namedtuple('Token', 'type string start end line index st
     return token_repr(self.type, self.string)
 
 
-if sys.version_info >= (3, 6):
-  AstConstant = ast.Constant
-else:
-  class AstConstant:
-    value = object()
-
-
 def match_token(token, tok_type, tok_str=None):
   # type: (Token, int, Optional[str]) -> bool
   """Returns true if token is of the given type and, if a string is given, has that string."""
+ """ + return token_type in (token.NL, token.COMMENT, token.ENCODING) def generate_tokens(text): @@ -141,7 +133,7 @@ def iter_children_astroid(node, include_joined_str=False): return node.get_children() -SINGLETONS = {c for n, c in iteritems(ast.__dict__) if isinstance(c, type) and +SINGLETONS = {c for n, c in ast.__dict__.items() if isinstance(c, type) and issubclass(c, (ast.expr_context, ast.boolop, ast.operator, ast.unaryop, ast.cmpop))} @@ -167,9 +159,9 @@ def iter_children_ast(node, include_joined_str=False): yield child -stmt_class_names = {n for n, c in iteritems(ast.__dict__) +stmt_class_names = {n for n, c in ast.__dict__.items() if isinstance(c, type) and issubclass(c, ast.stmt)} -expr_class_names = ({n for n, c in iteritems(ast.__dict__) +expr_class_names = ({n for n, c in ast.__dict__.items() if isinstance(c, type) and issubclass(c, ast.expr)} | {'AssignName', 'DelName', 'Const', 'AssignAttr', 'DelAttr'}) @@ -198,6 +190,33 @@ def is_joined_str(node): return node.__class__.__name__ == 'JoinedStr' +def is_expr_stmt(node): + # type: (AstNode) -> bool + """Returns whether node is an `Expr` node, which is a statement that is an expression.""" + return node.__class__.__name__ == 'Expr' + + + +CONSTANT_CLASSES: Tuple[Type, ...] = (ast.Constant,) +try: + from astroid import Const + CONSTANT_CLASSES += (Const,) +except ImportError: # pragma: no cover + # astroid is not available + pass + +def is_constant(node): + # type: (AstNode) -> bool + """Returns whether node is a Constant node.""" + return isinstance(node, CONSTANT_CLASSES) + + +def is_ellipsis(node): + # type: (AstNode) -> bool + """Returns whether node is an Ellipsis node.""" + return is_constant(node) and node.value is Ellipsis # type: ignore + + def is_starred(node): # type: (AstNode) -> bool """Returns whether node is a starred expression node.""" @@ -322,7 +341,7 @@ def replace(text, replacements): return ''.join(parts) -class NodeMethods(object): +class NodeMethods: """ Helper to get `visit_{node_type}` methods given a node's class and cache the results. """ @@ -344,14 +363,7 @@ class NodeMethods(object): return method -if sys.version_info[0] == 2: - # Python 2 doesn't support non-ASCII identifiers, and making the real patched_generate_tokens support Python 2 - # means working with raw tuples instead of tokenize.TokenInfo namedtuples. - def patched_generate_tokens(original_tokens): - # type: (Iterable[TokenInfo]) -> Iterator[TokenInfo] - return iter(original_tokens) -else: - def patched_generate_tokens(original_tokens): +def patched_generate_tokens(original_tokens): # type: (Iterable[TokenInfo]) -> Iterator[TokenInfo] """ Fixes tokens yielded by `tokenize.generate_tokens` to handle more non-ASCII characters in identifiers. @@ -376,7 +388,7 @@ else: for combined_token in combine_tokens(group): yield combined_token - def combine_tokens(group): +def combine_tokens(group): # type: (List[tokenize.TokenInfo]) -> List[tokenize.TokenInfo] if not any(tok.type == tokenize.ERRORTOKEN for tok in group) or len({tok.line for tok in group}) != 1: return group @@ -413,72 +425,61 @@ def last_stmt(node): return node -if sys.version_info[:2] >= (3, 8): - from functools import lru_cache - @lru_cache(maxsize=None) - def fstring_positions_work(): - # type: () -> bool - """ - The positions attached to nodes inside f-string FormattedValues have some bugs - that were fixed in Python 3.9.7 in https://github.com/python/cpython/pull/27729. - This checks for those bugs more concretely without relying on the Python version. 
@@ -413,72 +425,61 @@ def last_stmt(node):
   return node
 
 
-if sys.version_info[:2] >= (3, 8):
-  from functools import lru_cache
-
-  @lru_cache(maxsize=None)
-  def fstring_positions_work():
-    # type: () -> bool
-    """
-    The positions attached to nodes inside f-string FormattedValues have some bugs
-    that were fixed in Python 3.9.7 in https://github.com/python/cpython/pull/27729.
-    This checks for those bugs more concretely without relying on the Python version.
-    Specifically this checks:
-     - Values with a format spec or conversion
-     - Repeated (i.e. identical-looking) expressions
-     - f-strings implicitly concatenated over multiple lines.
-     - Multiline, triple-quoted f-strings.
-    """
-    source = """(
-      f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
-      f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
-      f"{x + y + z} {x} {y} {z} {z} {z!a} {z:z}"
-      f'''
-      {s} {t}
-      {u} {v}
-      '''
-    )"""
-    tree = ast.parse(source)
-    name_nodes = [node for node in ast.walk(tree) if isinstance(node, ast.Name)]
-    name_positions = [(node.lineno, node.col_offset) for node in name_nodes]
-    positions_are_unique = len(set(name_positions)) == len(name_positions)
-    correct_source_segments = all(
-      ast.get_source_segment(source, node) == node.id
-      for node in name_nodes
-    )
-    return positions_are_unique and correct_source_segments
+@lru_cache(maxsize=None)
+def fstring_positions_work():
+  # type: () -> bool
+  """
+  The positions attached to nodes inside f-string FormattedValues have some bugs
+  that were fixed in Python 3.9.7 in https://github.com/python/cpython/pull/27729.
+  This checks for those bugs more concretely without relying on the Python version.
+  Specifically this checks:
+   - Values with a format spec or conversion
+   - Repeated (i.e. identical-looking) expressions
+   - f-strings implicitly concatenated over multiple lines.
+   - Multiline, triple-quoted f-strings.
+  """
+  source = """(
+    f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
+    f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
+    f"{x + y + z} {x} {y} {z} {z} {z!a} {z:z}"
+    f'''
+    {s} {t}
+    {u} {v}
+    '''
+  )"""
+  tree = ast.parse(source)
+  name_nodes = [node for node in ast.walk(tree) if isinstance(node, ast.Name)]
+  name_positions = [(node.lineno, node.col_offset) for node in name_nodes]
+  positions_are_unique = len(set(name_positions)) == len(name_positions)
+  correct_source_segments = all(
+    ast.get_source_segment(source, node) == node.id
+    for node in name_nodes
+  )
+  return positions_are_unique and correct_source_segments
 
-  def annotate_fstring_nodes(tree):
-    # type: (ast.AST) -> None
-    """
-    Add a special attribute `_broken_positions` to nodes inside f-strings
-    if the lineno/col_offset cannot be trusted.
-    """
-    if sys.version_info >= (3, 12):
-      # f-strings were weirdly implemented until https://peps.python.org/pep-0701/
-      # In Python 3.12, inner nodes have sensible positions.
-      return
-    for joinedstr in walk(tree, include_joined_str=True):
-      if not isinstance(joinedstr, ast.JoinedStr):
-        continue
-      for part in joinedstr.values:
-        # The ast positions of the FormattedValues/Constant nodes span the full f-string, which is weird.
-        setattr(part, '_broken_positions', True)  # use setattr for mypy
-
-        if isinstance(part, ast.FormattedValue):
-          if not fstring_positions_work():
-            for child in walk(part.value):
-              setattr(child, '_broken_positions', True)
-
-        if part.format_spec:  # this is another JoinedStr
-          # Again, the standard positions span the full f-string.
-          setattr(part.format_spec, '_broken_positions', True)
-
-else:
-  def fstring_positions_work():
-    # type: () -> bool
-    return False
-
-  def annotate_fstring_nodes(_tree):
-    # type: (ast.AST) -> None
-    pass
+def annotate_fstring_nodes(tree):
+  # type: (ast.AST) -> None
+  """
+  Add a special attribute `_broken_positions` to nodes inside f-strings
+  if the lineno/col_offset cannot be trusted.
+  """
+  if sys.version_info >= (3, 12):
+    # f-strings were weirdly implemented until https://peps.python.org/pep-0701/
+    # In Python 3.12, inner nodes have sensible positions.
+    return
+  for joinedstr in walk(tree, include_joined_str=True):
+    if not isinstance(joinedstr, ast.JoinedStr):
+      continue
+    for part in joinedstr.values:
+      # The ast positions of the FormattedValues/Constant nodes span the full f-string, which is weird.
+      setattr(part, '_broken_positions', True)  # use setattr for mypy
+
+      if isinstance(part, ast.FormattedValue):
+        if not fstring_positions_work():
+          for child in walk(part.value):
+            setattr(child, '_broken_positions', True)
+
+      if part.format_spec:  # this is another JoinedStr
+        # Again, the standard positions span the full f-string.
+        setattr(part.format_spec, '_broken_positions', True)
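Both f-string helpers are now unconditional. A sketch of how they combine on an older CPython (illustrative source string; on 3.12+ ``annotate_fstring_nodes`` returns immediately and nothing is marked)::

    import ast
    from asttokens.util import annotate_fstring_nodes, fstring_positions_work, walk

    tree = ast.parse('msg = f"{name!r} has {count:>3} items"')
    annotate_fstring_nodes(tree)

    print(fstring_positions_work())  # False before CPython 3.9.7, True after (cached)
    for node in walk(tree, include_joined_str=True):
        if getattr(node, "_broken_positions", False):
            print(type(node).__name__, "position info is untrustworthy")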
+ """ + if sys.version_info >= (3, 12): + # f-strings were weirdly implemented until https://peps.python.org/pep-0701/ + # In Python 3.12, inner nodes have sensible positions. + return + for joinedstr in walk(tree, include_joined_str=True): + if not isinstance(joinedstr, ast.JoinedStr): + continue + for part in joinedstr.values: + # The ast positions of the FormattedValues/Constant nodes span the full f-string, which is weird. + setattr(part, '_broken_positions', True) # use setattr for mypy + + if isinstance(part, ast.FormattedValue): + if not fstring_positions_work(): + for child in walk(part.value): + setattr(child, '_broken_positions', True) + + if part.format_spec: # this is another JoinedStr + # Again, the standard positions span the full f-string. + setattr(part.format_spec, '_broken_positions', True) diff --git a/contrib/python/asttokens/asttokens/version.py b/contrib/python/asttokens/asttokens/version.py index 54499df3479..528787cfc8a 100644 --- a/contrib/python/asttokens/asttokens/version.py +++ b/contrib/python/asttokens/asttokens/version.py @@ -1 +1 @@ -__version__ = "2.4.1" +__version__ = "3.0.0" diff --git a/contrib/python/asttokens/ya.make b/contrib/python/asttokens/ya.make index 5ade3ab2f66..f91b3645a35 100644 --- a/contrib/python/asttokens/ya.make +++ b/contrib/python/asttokens/ya.make @@ -2,14 +2,10 @@ PY3_LIBRARY() -VERSION(2.4.1) +VERSION(3.0.0) LICENSE(Apache-2.0) -PEERDIR( - contrib/python/six -) - NO_LINT() PY_SRCS( |
