path: root/contrib/python/ipython/py3/IPython/utils/tokenutil.py
author    robot-piglet <robot-piglet@yandex-team.com>    2023-11-14 19:18:07 +0300
committer robot-piglet <robot-piglet@yandex-team.com>    2023-11-14 20:20:53 +0300
commit    874ef51d3d3edfa25f5a505ec6ab50e172965d1e (patch)
tree      620fb5e02063d23509d3aa3df2215c099ccde0b7 /contrib/python/ipython/py3/IPython/utils/tokenutil.py
parent    e356b34d3b0399e2f170881af15c91e4db9e3d11 (diff)
download  ydb-874ef51d3d3edfa25f5a505ec6ab50e172965d1e.tar.gz
Intermediate changes
Diffstat (limited to 'contrib/python/ipython/py3/IPython/utils/tokenutil.py')
-rw-r--r--    contrib/python/ipython/py3/IPython/utils/tokenutil.py    19
1 file changed, 12 insertions(+), 7 deletions(-)
diff --git a/contrib/python/ipython/py3/IPython/utils/tokenutil.py b/contrib/python/ipython/py3/IPython/utils/tokenutil.py
index 5fd8a1fbe1..6b99a58ae2 100644
--- a/contrib/python/ipython/py3/IPython/utils/tokenutil.py
+++ b/contrib/python/ipython/py3/IPython/utils/tokenutil.py
@@ -8,12 +8,14 @@ from io import StringIO
from keyword import iskeyword
import tokenize
+from tokenize import TokenInfo
+from typing import List, Optional
Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
def generate_tokens(readline):
- """wrap generate_tokens to catch EOF errors"""
+ """wrap generate_tkens to catch EOF errors"""
try:
for token in tokenize.generate_tokens(readline):
yield token
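
For context, the stdlib tokenizer raises tokenize.TokenError at EOF on incomplete input, which is exactly what this wrapper swallows. A minimal sketch using only the standard library (the exact error message varies across Python versions):

import tokenize
from io import StringIO

incomplete = "foo(1, 2"  # unterminated call: plain tokenize raises at EOF
try:
    list(tokenize.generate_tokens(StringIO(incomplete).readline))
except tokenize.TokenError as exc:
    print("tokenize raised:", exc)
# the generate_tokens() wrapper above would instead just stop yielding
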
@@ -22,7 +24,9 @@ def generate_tokens(readline):
return
-def generate_tokens_catch_errors(readline, extra_errors_to_catch=None):
+def generate_tokens_catch_errors(
+ readline, extra_errors_to_catch: Optional[List[str]] = None
+):
default_errors_to_catch = [
"unterminated string literal",
"invalid non-printable character",
@@ -31,7 +35,7 @@ def generate_tokens_catch_errors(readline, extra_errors_to_catch=None):
assert extra_errors_to_catch is None or isinstance(extra_errors_to_catch, list)
errors_to_catch = default_errors_to_catch + (extra_errors_to_catch or [])
- tokens = []
+ tokens: List[TokenInfo] = []
try:
for token in tokenize.generate_tokens(readline):
tokens.append(token)
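
A hedged usage sketch for generate_tokens_catch_errors; the error-handling body that decides what to yield on bad input lies outside this hunk, so the quiet behavior noted in the comment is an assumption, not something the diff confirms:

from io import StringIO
from IPython.utils.tokenutil import generate_tokens_catch_errors

bad_src = 'x = "unterminated\n'  # matches "unterminated string literal"
for tok in generate_tokens_catch_errors(StringIO(bad_src).readline):
    print(tok)  # assumed: listed error messages are swallowed, not raised
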
@@ -84,7 +88,8 @@ def line_at_cursor(cell, cursor_pos=0):
line = ""
return (line, offset)
-def token_at_cursor(cell, cursor_pos=0):
+
+def token_at_cursor(cell: str, cursor_pos: int = 0):
"""Get the token at a given cursor
Used for introspection.
@@ -94,13 +99,13 @@ def token_at_cursor(cell, cursor_pos=0):
Parameters
----------
- cell : unicode
+ cell : str
A block of Python code
cursor_pos : int
The location of the cursor in the block where the token should be found
"""
- names = []
- tokens = []
+ names: List[str] = []
+ tokens: List[Token] = []
call_names = []
offsets = {1: 0} # lines start at 1
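
A hedged example of the newly typed token_at_cursor; the dotted name in the comment reflects the function's introspection purpose and is an assumption, since this hunk only shows the signature and docstring:

from IPython.utils.tokenutil import token_at_cursor

cell = "import numpy\nnumpy.linspace(0, 1)"
cursor = cell.index("linspace") + 3  # place the cursor inside "linspace"
print(token_at_cursor(cell, cursor_pos=cursor))  # expected: "numpy.linspace"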