| author | orivej <orivej@yandex-team.ru> | 2022-02-10 16:45:01 +0300 |
| --- | --- | --- |
| committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:45:01 +0300 |
| commit | 2d37894b1b037cf24231090eda8589bbb44fb6fc (patch) | |
| tree | be835aa92c6248212e705f25388ebafcf84bc7a1 | /contrib/python/Pygments/py3/pygments/lexers/configs.py |
| parent | 718c552901d703c502ccbefdfc3c9028d608b947 (diff) | |
| download | ydb-2d37894b1b037cf24231090eda8589bbb44fb6fc.tar.gz | |
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/Pygments/py3/pygments/lexers/configs.py')
| -rw-r--r-- | contrib/python/Pygments/py3/pygments/lexers/configs.py | 222 |

1 file changed, 111 insertions, 111 deletions
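The insertion and deletion counts match because the commit only restores trailing whitespace; no line's visible content changes. A minimal sketch of how that can be double-checked locally, assuming the two revisions of configs.py have been saved to the placeholder paths below (the paths are not part of this page):

```python
# Hedged sketch: verify that two saved revisions of configs.py differ only in
# trailing whitespace. OLD_PATH and NEW_PATH are placeholders, not files that
# ship with this commit.
import difflib

OLD_PATH = "configs.py.before"  # assumed: file content at parent 718c5529
NEW_PATH = "configs.py.after"   # assumed: file content at commit 2d37894b

with open(OLD_PATH, encoding="utf-8") as f:
    old_lines = [line.rstrip() for line in f]
with open(NEW_PATH, encoding="utf-8") as f:
    new_lines = [line.rstrip() for line in f]

# With trailing whitespace stripped, a whitespace-only change produces no hunks.
remaining = list(difflib.unified_diff(old_lines, new_lines, lineterm=""))
print("whitespace-only change" if not remaining else "\n".join(remaining))
```

If the diff of the stripped lines comes back empty, the commit is purely an authorship/whitespace restoration, as the commit message states.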
diff --git a/contrib/python/Pygments/py3/pygments/lexers/configs.py b/contrib/python/Pygments/py3/pygments/lexers/configs.py
index 871053d607..99fab14860 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/configs.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/configs.py
@@ -15,7 +15,7 @@ from pygments.lexer import ExtendedRegexLexer, RegexLexer, default, words, \
 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
     Number, Punctuation, Whitespace, Literal, Error, Generic
 from pygments.lexers.shell import BashLexer
-from pygments.lexers.data import JsonLexer
+from pygments.lexers.data import JsonLexer
 
 __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
            'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
@@ -48,9 +48,9 @@ class IniLexer(RegexLexer):
             (r'\[.*?\]$', Keyword),
             (r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
              bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
-            # standalone option, supported by some INI parsers
-            (r'(.+?)$', Name.Attribute),
-        ],
+            # standalone option, supported by some INI parsers
+            (r'(.+?)$', Name.Attribute),
+        ],
     }
 
     def analyse_text(text):
@@ -108,8 +108,8 @@ class PropertiesLexer(RegexLexer):
     """
     Lexer for configuration files in Java's properties format.
 
-    Note: trailing whitespace counts as part of the value as per spec
-
+    Note: trailing whitespace counts as part of the value as per spec
+
     .. versionadded:: 1.4
     """
 
@@ -121,9 +121,9 @@ class PropertiesLexer(RegexLexer):
     tokens = {
         'root': [
             (r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Whitespace, String)),
-            (r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
+            (r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
             (r'(^ *)([#!].*)', bygroups(Whitespace, Comment)),
-            # More controversial comments
+            # More controversial comments
             (r'(^ *)((?:;|//).*)', bygroups(Whitespace, Comment)),
             (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
              bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
@@ -471,7 +471,7 @@ class NginxConfLexer(RegexLexer):
     """
     name = 'Nginx configuration file'
     aliases = ['nginx']
-    filenames = ['nginx.conf']
+    filenames = ['nginx.conf']
     mimetypes = ['text/x-nginx-conf']
 
     tokens = {
@@ -549,7 +549,7 @@ class DockerLexer(RegexLexer):
     mimetypes = ['text/x-dockerfile-config']
 
     _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
-    _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
+    _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
     _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
     flags = re.IGNORECASE | re.MULTILINE
 
@@ -568,7 +568,7 @@ class DockerLexer(RegexLexer):
             (r'(%s|VOLUME)\b(\s+)(.*)' % (_keywords), bygroups(Keyword, Whitespace, String)),
             (r'(%s)(\s+)' % (_bash_keywords,), bygroups(Keyword, Whitespace)),
             (r'(.*\\\n)*.+', using(BashLexer)),
-        ]
+        ]
     }
 
 
@@ -587,7 +587,7 @@ class TerraformLexer(ExtendedRegexLexer):
     classes = ('backend', 'data', 'module', 'output', 'provider',
                'provisioner', 'resource', 'variable')
     classes_re = "({})".format(('|').join(classes))
-
+
     types = ('string', 'number', 'bool', 'list', 'tuple', 'map', 'set', 'object', 'null')
 
     numeric_functions = ('abs', 'ceil', 'floor', 'log', 'max',
@@ -759,8 +759,8 @@ class TermcapLexer(RegexLexer):
     .. versionadded:: 2.1
     """
     name = 'Termcap'
-    aliases = ['termcap']
-    filenames = ['termcap', 'termcap.src']
+    aliases = ['termcap']
+    filenames = ['termcap', 'termcap.src']
     mimetypes = []
 
     # NOTE:
@@ -771,14 +771,14 @@ class TermcapLexer(RegexLexer):
     tokens = {
         'root': [
             (r'^#.*', Comment),
-            (r'^[^\s#:|]+', Name.Tag, 'names'),
+            (r'^[^\s#:|]+', Name.Tag, 'names'),
             (r'\s+', Whitespace),
         ],
         'names': [
             (r'\n', Whitespace, '#pop'),
             (r':', Punctuation, 'defs'),
             (r'\|', Punctuation),
-            (r'[^:|]+', Name.Attribute),
+            (r'[^:|]+', Name.Attribute),
         ],
         'defs': [
             (r'(\\)(\n[ \t]*)', bygroups(Text, Whitespace)),
@@ -806,8 +806,8 @@ class TerminfoLexer(RegexLexer):
     .. versionadded:: 2.1
     """
    name = 'Terminfo'
-    aliases = ['terminfo']
-    filenames = ['terminfo', 'terminfo.src']
+    aliases = ['terminfo']
+    filenames = ['terminfo', 'terminfo.src']
     mimetypes = []
 
     # NOTE:
@@ -818,14 +818,14 @@ class TerminfoLexer(RegexLexer):
     tokens = {
         'root': [
             (r'^#.*$', Comment),
-            (r'^[^\s#,|]+', Name.Tag, 'names'),
+            (r'^[^\s#,|]+', Name.Tag, 'names'),
             (r'\s+', Whitespace),
         ],
         'names': [
             (r'\n', Whitespace, '#pop'),
             (r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), 'defs'),
             (r'\|', Punctuation),
-            (r'[^,|]+', Name.Attribute),
+            (r'[^,|]+', Name.Attribute),
         ],
         'defs': [
             (r'\n[ \t]+', Whitespace),
@@ -854,8 +854,8 @@ class PkgConfigLexer(RegexLexer):
     """
 
     name = 'PkgConfig'
-    aliases = ['pkgconfig']
-    filenames = ['*.pc']
+    aliases = ['pkgconfig']
+    filenames = ['*.pc']
     mimetypes = []
 
     tokens = {
@@ -923,8 +923,8 @@ class PacmanConfLexer(RegexLexer):
     """
 
     name = 'PacmanConf'
-    aliases = ['pacmanconf']
-    filenames = ['pacman.conf']
+    aliases = ['pacmanconf']
+    filenames = ['pacman.conf']
     mimetypes = []
 
     tokens = {
@@ -952,108 +952,108 @@ class PacmanConfLexer(RegexLexer):
                 '%u',      # url
                 ), suffix=r'\b'),
              Name.Variable),
-
+
             # fallback
             (r'\s+', Whitespace),
             (r'.', Text),
         ],
     }
-
-
-class AugeasLexer(RegexLexer):
-    """
-    Lexer for `Augeas <http://augeas.net>`_.
-
-    .. versionadded:: 2.4
+    """
+    name = 'Augeas'
+    aliases = ['augeas']
+    filenames = ['*.aug']
+
+    tokens = {
+        'root': [
             (r'(module)(\s*)([^\s=]+)', bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
             (r'(let)(\s*)([^\s=]+)', bygroups(Keyword.Declaration, Whitespace, Name.Variable)),
             (r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)', bygroups(Name.Builtin, Whitespace)),
-            (r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))', bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)),
-            (r'\(\*', Comment.Multiline, 'comment'),
-            (r'[*+\-.;=?|]', Operator),
-            (r'[()\[\]{}]', Operator),
-            (r'"', String.Double, 'string'),
-            (r'\/', String.Regex, 'regex'),
-            (r'([A-Z]\w*)(\.)(\w+)', bygroups(Name.Namespace, Punctuation, Name.Variable)),
-            (r'.', Name.Variable),
             (r'\s+', Whitespace),
-        ],
-        'string': [
-            (r'\\.', String.Escape),
-            (r'[^"]', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'regex': [
-            (r'\\.', String.Escape),
-            (r'[^/]', String.Regex),
-            (r'\/', String.Regex, '#pop'),
-        ],
-        'comment': [
-            (r'[^*)]', Comment.Multiline),
-            (r'\(\*', Comment.Multiline, '#push'),
-            (r'\*\)', Comment.Multiline, '#pop'),
-            (r'[)*]', Comment.Multiline)
-        ],
-    }
-
-
-class TOMLLexer(RegexLexer):
-    """
-    Lexer for `TOML <https://github.com/toml-lang/toml>`_, a simple language
-    for config files.
-
-    .. versionadded:: 2.4
+    """
+
+    name = 'TOML'
+    aliases = ['toml']
     filenames = ['*.toml', 'Pipfile', 'poetry.lock']
-
-    tokens = {
-        'root': [
             # Table
             (r'^(\s*)(\[.*?\])$', bygroups(Whitespace, Keyword)),
-
-            # Basics, comments, strings
             (r'[ \t]+', Whitespace),
             (r'\n', Whitespace),
-            (r'#.*?$', Comment.Single),
-            # Basic string
             (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            # Literal string
-            (r'\'\'\'(.*)\'\'\'', String),
-            (r'\'[^\']*\'', String),
-            (r'(true|false)$', Keyword.Constant),
-            (r'[a-zA-Z_][\w\-]*', Name),
-
-            # Datetime
-            # TODO this needs to be expanded, as TOML is rather flexible:
-            # https://github.com/toml-lang/toml#offset-date-time
-            (r'\d{4}-\d{2}-\d{2}(?:T| )\d{2}:\d{2}:\d{2}(?:Z|[-+]\d{2}:\d{2})', Number.Integer),
-
-            # Numbers
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
-            # Handle +-inf, +-infinity, +-nan
-            (r'[+-]?(?:(inf(?:inity)?)|nan)', Number.Float),
-            (r'[+-]?\d+', Number.Integer),
-
-            # Punctuation
-            (r'[]{}:(),;[]', Punctuation),
-            (r'\.', Punctuation),
-
-            # Operators
-            (r'=', Operator)
-
-        ]
-    }
 
 
 class NestedTextLexer(RegexLexer):
     """