author | orivej <orivej@yandex-team.ru>, 2022-02-10 16:45:01 +0300
---|---
committer | Daniil Cherednik <dcherednik@yandex-team.ru>, 2022-02-10 16:45:01 +0300
commit | 2d37894b1b037cf24231090eda8589bbb44fb6fc (patch)
tree | be835aa92c6248212e705f25388ebafcf84bc7a1 /contrib/python/Pygments/py2/pygments/lexers/templates.py
parent | 718c552901d703c502ccbefdfc3c9028d608b947 (diff)
download | ydb-2d37894b1b037cf24231090eda8589bbb44fb6fc.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/Pygments/py2/pygments/lexers/templates.py')
-rw-r--r-- | contrib/python/Pygments/py2/pygments/lexers/templates.py | 272 |
1 file changed, 136 insertions, 136 deletions
diff --git a/contrib/python/Pygments/py2/pygments/lexers/templates.py b/contrib/python/Pygments/py2/pygments/lexers/templates.py
index f9d96ab80e..f891242cb8 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/templates.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/templates.py
@@ -5,7 +5,7 @@

    Lexers for various template engines' markup.

-    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

@@ -44,7 +44,7 @@ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
           'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer', 'LassoCssLexer',
           'LassoJavascriptLexer', 'HandlebarsLexer', 'HandlebarsHtmlLexer',
           'YamlJinjaLexer', 'LiquidLexer',
-          'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
+          'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']


class ErbLexer(Lexer):
@@ -187,13 +187,13 @@ class SmartyLexer(RegexLexer):

    def analyse_text(text):
        rv = 0.0
-        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
+        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
            rv += 0.15
-        if re.search(r'\{include\s+file=.*?\}', text):
+        if re.search(r'\{include\s+file=.*?\}', text):
            rv += 0.15
-        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
+        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
            rv += 0.15
-        if re.search(r'\{\$.*?\}', text):
+        if re.search(r'\{\$.*?\}', text):
            rv += 0.01
        return rv
@@ -251,7 +251,7 @@ class VelocityLexer(RegexLexer):
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
-            (r'[,:]', Punctuation),
+            (r'[,:]', Punctuation),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -259,8 +259,8 @@ class VelocityLexer(RegexLexer):
            (r'(true|false|null)\b', Keyword.Constant),
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
-            (r'\{', Punctuation, '#push'),
-            (r'\}', Punctuation, '#pop'),
+            (r'\{', Punctuation, '#push'),
+            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
@@ -375,7 +375,7 @@ class DjangoLexer(RegexLexer):
            (r'\.\w+', Name.Variable),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
-            (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
+            (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
@@ -421,18 +421,18 @@ class MyghtyLexer(RegexLexer):
    tokens = {
        'root': [
            (r'\s+', Text),
-            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
+            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
-            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
+            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
-            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
+            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
-            (r'(?s)(<%!?)(.*?)(%>)',
+            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
@@ -538,20 +538,20 @@ class MasonLexer(RegexLexer):
    tokens = {
        'root': [
            (r'\s+', Text),
-            (r'(?s)(<%doc>)(.*?)(</%doc>)',
+            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
-            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
+            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
-            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
+            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PerlLexer), Name.Tag)),
-            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
+            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
-            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
+            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
-            (r'(?s)(<%!?)(.*?)(%>)',
+            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
@@ -607,7 +607,7 @@ class MakoLexer(RegexLexer):
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
-            (r'(?s)(<%(?:!?))(.*?)(%>)',
+            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
@@ -759,7 +759,7 @@ class CheetahLexer(RegexLexer):
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
-            (r'(?s)(\$\{!?)(.*?)(\})',
+            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
@@ -877,7 +877,7 @@ class GenshiMarkupLexer(RegexLexer):
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
-            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
+            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<$]', Other),
        ],
@@ -942,9 +942,9 @@ class HtmlGenshiLexer(DelegatingLexer):

    def analyse_text(text):
        rv = 0.0
-        if re.search(r'\$\{.*?\}', text) is not None:
+        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
-        if re.search(r'py:(.*?)=["\']', text) is not None:
+        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
        return rv + HtmlLexer.analyse_text(text) - 0.01
@@ -967,9 +967,9 @@ class GenshiLexer(DelegatingLexer):

    def analyse_text(text):
        rv = 0.0
-        if re.search(r'\$\{.*?\}', text) is not None:
+        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
-        if re.search(r'py:(.*?)=["\']', text) is not None:
+        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
        return rv + XmlLexer.analyse_text(text) - 0.01
@@ -1627,7 +1627,7 @@ class SspLexer(DelegatingLexer):
    def analyse_text(text):
        rv = 0.0
-        if re.search(r'val \w+\s*:', text):
+        if re.search(r'val \w+\s*:', text):
            rv += 0.6
        if looks_like_xml(text):
            rv += 0.2
@@ -1823,34 +1823,34 @@ class HandlebarsLexer(RegexLexer):
            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
-            # Partials {{> ...}}
-            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
-            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
-            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
-             'dynamic-partial'),
-
-            include('generic'),
-        ],
-        'dynamic-partial': [
-            (r'\s+', Text),
-            (r'\)', Punctuation, '#pop'),
-
-            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
-                                                      Name.Variable, Text)),
-            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
-                                             using(this, state='variable'))),
-            (r'[\w-]+', Name.Function),
-
-            include('generic'),
-        ],
-        'variable': [
+            # Partials {{> ...}}
+            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
+            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
+            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
+             'dynamic-partial'),
+
+            include('generic'),
+        ],
+        'dynamic-partial': [
+            (r'\s+', Text),
+            (r'\)', Punctuation, '#pop'),
+
+            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
+                                                      Name.Variable, Text)),
+            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
+                                             using(this, state='variable'))),
+            (r'[\w-]+', Name.Function),
+
+            include('generic'),
+        ],
+        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
-            (r'\.[\w-]+', Name.Variable),
-            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
-        ],
-        'generic': [
-            include('variable'),
-
+            (r'\.[\w-]+', Name.Variable),
+            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
+        ],
+        'generic': [
+            include('variable'),
+
            # borrowed from DjangoLexer
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
@@ -1954,7 +1954,7 @@ class LiquidLexer(RegexLexer):

        'output': [
            include('whitespace'),
-            (r'\}\}', Punctuation, '#pop'),  # end of output
+            (r'\}\}', Punctuation, '#pop'),  # end of output
            (r'\|', Punctuation, 'filters')
        ],
@@ -2200,83 +2200,83 @@ class TwigHtmlLexer(DelegatingLexer):
    def __init__(self, **options):
        super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
-
-
-class Angular2Lexer(RegexLexer):
-    """
-    Generic
-    `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
-    template lexer.
-
-    Highlights only the Angular template tags (stuff between `{{` and `}}` and
-    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
-    Everything else is left for a delegating lexer.
-
-    .. versionadded:: 2.1
-    """
-
-    name = "Angular2"
-    aliases = ['ng2']
-
-    tokens = {
-        'root': [
-            (r'[^{([*#]+', Other),
-
-            # {{meal.name}}
-            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
-
-            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
-            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
-             bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
-             'attr'),
-            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
-             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
-
-            # *ngIf="..."; #f="ngForm"
-            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
-             bygroups(Punctuation, Name.Attribute, Punctuation, Operator), 'attr'),
-            (r'([*#])([\w:.-]+)(\s*)',
-             bygroups(Punctuation, Name.Attribute, Punctuation)),
-        ],
-
-        'ngExpression': [
-            (r'\s+(\|\s+)?', Text),
-            (r'\}\}', Comment.Preproc, '#pop'),
-
-            # Literals
-            (r':?(true|false)', String.Boolean),
-            (r':?"(\\\\|\\"|[^"])*"', String.Double),
-            (r":?'(\\\\|\\'|[^'])*'", String.Single),
-            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
-             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
-
-            # Variabletext
-            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
-            (r'\.[\w-]+(\(.*\))?', Name.Variable),
-
-            # inline If
-            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
-             bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
-        ],
-        'attr': [
-            ('".*?"', String, '#pop'),
-            ("'.*?'", String, '#pop'),
-            (r'[^\s>]+', String, '#pop'),
-        ],
-    }
-
-
-class Angular2HtmlLexer(DelegatingLexer):
-    """
-    Subclass of the `Angular2Lexer` that highlights unlexed data with the
-    `HtmlLexer`.
-
-    .. versionadded:: 2.0
-    """
-
-    name = "HTML + Angular2"
-    aliases = ["html+ng2"]
-    filenames = ['*.ng2']
-
-    def __init__(self, **options):
-        super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options)
+
+
+class Angular2Lexer(RegexLexer):
+    """
+    Generic
+    `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
+    template lexer.
+
+    Highlights only the Angular template tags (stuff between `{{` and `}}` and
+    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
+    Everything else is left for a delegating lexer.
+
+    .. versionadded:: 2.1
+    """
+
+    name = "Angular2"
+    aliases = ['ng2']
+
+    tokens = {
+        'root': [
+            (r'[^{([*#]+', Other),
+
+            # {{meal.name}}
+            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
+
+            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
+            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
+             bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
+             'attr'),
+            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
+             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
+
+            # *ngIf="..."; #f="ngForm"
+            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
+             bygroups(Punctuation, Name.Attribute, Punctuation, Operator), 'attr'),
+            (r'([*#])([\w:.-]+)(\s*)',
+             bygroups(Punctuation, Name.Attribute, Punctuation)),
+        ],
+
+        'ngExpression': [
+            (r'\s+(\|\s+)?', Text),
+            (r'\}\}', Comment.Preproc, '#pop'),
+
+            # Literals
+            (r':?(true|false)', String.Boolean),
+            (r':?"(\\\\|\\"|[^"])*"', String.Double),
+            (r":?'(\\\\|\\'|[^'])*'", String.Single),
+            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+
+            # Variabletext
+            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
+            (r'\.[\w-]+(\(.*\))?', Name.Variable),
+
+            # inline If
+            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
+             bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
+        ],
+        'attr': [
+            ('".*?"', String, '#pop'),
+            ("'.*?'", String, '#pop'),
+            (r'[^\s>]+', String, '#pop'),
+        ],
+    }
+
+
+class Angular2HtmlLexer(DelegatingLexer):
+    """
+    Subclass of the `Angular2Lexer` that highlights unlexed data with the
+    `HtmlLexer`.
+
+    .. versionadded:: 2.0
+    """
+
+    name = "HTML + Angular2"
+    aliases = ["html+ng2"]
+    filenames = ['*.ng2']
+
+    def __init__(self, **options):
+        super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options)
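For context, the file touched here only defines Pygments template lexers, and the commit re-touches lines without changing their visible content. A minimal usage sketch (not part of the commit) of how these lexers are reached through Pygments' public API: the `html+ng2` alias comes from the `Angular2HtmlLexer` definition in the diff above, while the sample markup and the formatter choice are illustrative assumptions.

```python
# Minimal sketch, assuming a stock Pygments installation.
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

# Sample Angular-style template: the (event)= binding and the {{...}}
# expression are what Angular2Lexer tokenizes; everything else is
# delegated to HtmlLexer by Angular2HtmlLexer.
template = '<button (click)="deleteOrder()">{{ order.name }}</button>'

lexer = get_lexer_by_name("html+ng2")  # alias registered by Angular2HtmlLexer
print(highlight(template, lexer, HtmlFormatter()))
```

Since the diff restores authorship annotation rather than altering code, the output of such a call should be identical before and after this commit.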