about summary refs log tree commit diff stats
path: root/contrib/python/Pygments/py3/pygments/lexer.py
diff options
context:
space:
mode:
authorarcadia-devtools <arcadia-devtools@yandex-team.ru>2022-06-09 14:39:19 +0300
committerarcadia-devtools <arcadia-devtools@yandex-team.ru>2022-06-09 14:39:19 +0300
commitc04b663c7bb4b750deeb8f48f620497ec13da8fa (patch)
tree151ebc8bfdd2ad918caf5e6e2d8013e14272ddf8 /contrib/python/Pygments/py3/pygments/lexer.py
parent0d55ca22c507d18c2f35718687e0b06d9915397b (diff)
downloadydb-c04b663c7bb4b750deeb8f48f620497ec13da8fa.tar.gz
intermediate changes
ref:2d4f292087954c9344efdabb7b2a67f466263c65
Diffstat (limited to 'contrib/python/Pygments/py3/pygments/lexer.py')
-rw-r--r-- contrib/python/Pygments/py3/pygments/lexer.py | 11
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/contrib/python/Pygments/py3/pygments/lexer.py b/contrib/python/Pygments/py3/pygments/lexer.py
index 33d738a8d6..27d73ee1c3 100644
--- a/contrib/python/Pygments/py3/pygments/lexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexer.py
@@ -4,7 +4,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -76,6 +76,9 @@ class Lexer(metaclass=LexerMeta):
#: Name of the lexer
name = None
+ #: URL of the language specification/definition
+ url = None
+
#: Shortcuts for the lexer
aliases = []
@@ -618,7 +621,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
"""
Split ``text`` into (tokentype, text) pairs.
- ``stack`` is the inital stack (default: ``['root']``)
+ ``stack`` is the initial stack (default: ``['root']``)
"""
pos = 0
tokendefs = self._tokens
@@ -738,7 +741,7 @@ class ExtendedRegexLexer(RegexLexer):
elif isinstance(new_state, int):
# see RegexLexer for why this check is made
if abs(new_state) >= len(ctx.stack):
- del ctx.state[1:]
+ del ctx.stack[1:]
else:
del ctx.stack[new_state:]
elif new_state == '#push':
@@ -792,7 +795,7 @@ def do_insertions(insertions, tokens):
# iterate over the token stream where we want to insert
# the tokens from the insertion list.
for i, t, v in tokens:
- # first iteration. store the postition of first item
+ # first iteration. store the position of first item
if realpos is None:
realpos = i
oldi = 0