author | ilezhankin <ilezhankin@yandex-team.ru> | 2022-02-10 16:45:56 +0300 |
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:45:56 +0300 |
commit | 62a805381e41500fbc7914c37c71ab040a098f4e (patch) | |
tree | 1a2c5ffcf89eb53ecd79dbc9bc0a195c27404d0c /contrib/python/Pygments/py3/pygments/lexers/urbi.py | |
parent | 1d125034f06575234f83f24f08677955133f140e (diff) | |
download | ydb-62a805381e41500fbc7914c37c71ab040a098f4e.tar.gz |
Restoring authorship annotation for <ilezhankin@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/Pygments/py3/pygments/lexers/urbi.py')
-rw-r--r-- | contrib/python/Pygments/py3/pygments/lexers/urbi.py | 258 |
1 file changed, 129 insertions, 129 deletions
```diff
diff --git a/contrib/python/Pygments/py3/pygments/lexers/urbi.py b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
index 2167b72658..d9c1c9f82c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/urbi.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
@@ -1,135 +1,135 @@
-"""
-    pygments.lexers.urbi
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for UrbiScript language.
-
+"""
+    pygments.lexers.urbi
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for UrbiScript language.
+
     :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['UrbiscriptLexer']
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
-    """
-    For UrbiScript source code.
-
-    .. versionadded:: 1.5
-    """
-
-    name = 'UrbiScript'
-    aliases = ['urbiscript']
-    filenames = ['*.u']
-    mimetypes = ['application/x-urbiscript']
-
-    flags = re.DOTALL
-
-    # TODO
-    # - handle Experimental and deprecated tags with specific tokens
-    # - handle Angles and Durations with specific tokens
-
-    def blob_callback(lexer, match, ctx):
-        text_before_blob = match.group(1)
-        blob_start = match.group(2)
-        blob_size_str = match.group(3)
-        blob_size = int(blob_size_str)
-        yield match.start(), String, text_before_blob
-        ctx.pos += len(text_before_blob)
-
-        # if blob size doesn't match blob format (example : "\B(2)(aaa)")
-        # yield blob as a string
-        if ctx.text[match.end() + blob_size] != ")":
-            result = "\\B(" + blob_size_str + ")("
-            yield match.start(), String, result
-            ctx.pos += len(result)
-            return
-
-        # if blob is well formated, yield as Escape
-        blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
-        yield match.start(), String.Escape, blob_text
-        ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            # comments
-            (r'//.*?\n', Comment),
-            (r'/\*', Comment.Multiline, 'comment'),
-            (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
-            (words((
-                'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
-                'continue', 'default', 'else', 'enum', 'every', 'external',
-                'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
-                'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
-                'waituntil', 'whenever', 'while'), suffix=r'\b'),
-             Keyword),
-            (words((
-                'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
-                'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
-                'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
-                'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
-                'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
-                'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
-             Keyword.Reserved),
-            # deprecated keywords, use a meaningfull token when available
-            (r'(emit|foreach|internal|loopn|static)\b', Keyword),
-            # ignored keywords, use a meaningfull token when available
-            (r'(private|protected|public)\b', Keyword),
-            (r'(var|do|const|function|class)\b', Keyword.Declaration),
-            (r'(true|false|nil|void)\b', Keyword.Constant),
-            (words((
-                'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
-                'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
-                'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
-                'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
-                'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
-                'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
-                'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
-                'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
-                'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
-                'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
-                'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
-                'UValue', 'UVar'), suffix=r'\b'),
-             Name.Builtin),
-            (r'(?:this)\b', Name.Builtin.Pseudo),
-            # don't match single | and &
-            (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
-            (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
-             Operator.Word),
-            (r'[{}\[\]()]+', Punctuation),
-            (r'(?:;|\||,|&|\?|!)+', Punctuation),
-            (r'[$a-zA-Z_]\w*', Name.Other),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            # Float, Integer, Angle and Duration
-            (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
-             r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
-            # handle binary blob in strings
-            (r'"', String.Double, "string.double"),
-            (r"'", String.Single, "string.single"),
-        ],
-        'string.double': [
-            (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['UrbiscriptLexer']
+
+
+class UrbiscriptLexer(ExtendedRegexLexer):
+    """
+    For UrbiScript source code.
+
+    .. versionadded:: 1.5
+    """
+
+    name = 'UrbiScript'
+    aliases = ['urbiscript']
+    filenames = ['*.u']
+    mimetypes = ['application/x-urbiscript']
+
+    flags = re.DOTALL
+
+    # TODO
+    # - handle Experimental and deprecated tags with specific tokens
+    # - handle Angles and Durations with specific tokens
+
+    def blob_callback(lexer, match, ctx):
+        text_before_blob = match.group(1)
+        blob_start = match.group(2)
+        blob_size_str = match.group(3)
+        blob_size = int(blob_size_str)
+        yield match.start(), String, text_before_blob
+        ctx.pos += len(text_before_blob)
+
+        # if blob size doesn't match blob format (example : "\B(2)(aaa)")
+        # yield blob as a string
+        if ctx.text[match.end() + blob_size] != ")":
+            result = "\\B(" + blob_size_str + ")("
+            yield match.start(), String, result
+            ctx.pos += len(result)
+            return
+
+        # if blob is well formated, yield as Escape
+        blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
+        yield match.start(), String.Escape, blob_text
+        ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            # comments
+            (r'//.*?\n', Comment),
+            (r'/\*', Comment.Multiline, 'comment'),
+            (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
+            (words((
+                'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
+                'continue', 'default', 'else', 'enum', 'every', 'external',
+                'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
+                'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
+                'waituntil', 'whenever', 'while'), suffix=r'\b'),
+             Keyword),
+            (words((
+                'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
+                'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
+                'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
+                'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
+                'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
+                'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
+             Keyword.Reserved),
+            # deprecated keywords, use a meaningfull token when available
+            (r'(emit|foreach|internal|loopn|static)\b', Keyword),
+            # ignored keywords, use a meaningfull token when available
+            (r'(private|protected|public)\b', Keyword),
+            (r'(var|do|const|function|class)\b', Keyword.Declaration),
+            (r'(true|false|nil|void)\b', Keyword.Constant),
+            (words((
+                'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
+                'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
+                'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
+                'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
+                'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
+                'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
+                'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
+                'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
+                'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
+                'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
+                'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
+                'UValue', 'UVar'), suffix=r'\b'),
+             Name.Builtin),
+            (r'(?:this)\b', Name.Builtin.Pseudo),
+            # don't match single | and &
+            (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
+            (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
+             Operator.Word),
+            (r'[{}\[\]()]+', Punctuation),
+            (r'(?:;|\||,|&|\?|!)+', Punctuation),
+            (r'[$a-zA-Z_]\w*', Name.Other),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            # Float, Integer, Angle and Duration
+            (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
+             r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
+            # handle binary blob in strings
+            (r'"', String.Double, "string.double"),
+            (r"'", String.Single, "string.single"),
+        ],
+        'string.double': [
+            (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
             (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
-        ],
-        'string.single': [
-            (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
+        ],
+        'string.single': [
+            (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
             (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
-        ],
-        # from http://pygments.org/docs/lexerdevelopment/#changing-states
-        'comment': [
-            (r'[^*/]', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline),
-        ]
-    }
+        ],
+        # from http://pygments.org/docs/lexerdevelopment/#changing-states
+        'comment': [
+            (r'[^*/]', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline),
+        ]
+    }
 
     def analyse_text(text):
         """This is fairly similar to C and others, but freezeif and
```
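
For reference, the lexer touched by this diff is driven through the standard Pygments API. The snippet below is a minimal usage sketch, not part of the commit: the UrbiScript sample text, the choice of `TerminalFormatter`, and the `\B(3)(abc)` blob example are illustrative assumptions.

```python
# Minimal usage sketch (not part of this commit): exercise the UrbiscriptLexer
# from the file above through the public Pygments API.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name
from pygments.lexers.urbi import UrbiscriptLexer

# Hypothetical UrbiScript sample, only for illustration.
sample = '''
// toy snippet
var x = 42;
every (1s)
  echo("ping");
'''

# Render with ANSI colors; any other Pygments formatter works the same way.
print(highlight(sample, UrbiscriptLexer(), TerminalFormatter()))

# The lexer is also reachable via the alias registered in the diff above.
lexer = get_lexer_by_name('urbiscript')

# Inside double-quoted strings, "\B(<size>)(<bytes>)" sequences are routed to
# blob_callback, which emits them as String.Escape when the size matches.
for token_type, value in lexer.get_tokens(r'"payload: \B(3)(abc)";'):
    print(token_type, repr(value))
```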