diff options
author | robot-piglet <robot-piglet@yandex-team.com> | 2024-05-20 07:58:40 +0300 |
---|---|---|
committer | robot-piglet <robot-piglet@yandex-team.com> | 2024-05-20 08:05:00 +0300 |
commit | bcd5bcc390793791d293d386b2ebefbe683fb4e1 (patch) | |
tree | c93e3b8c847237e7e7626f4a07f1b657bb34f04d /contrib/python/Pygments/py3/pygments/lexers/lean.py | |
parent | 1a9f1508fe9c8c5927ffebf33197a6108e70501d (diff) | |
download | ydb-bcd5bcc390793791d293d386b2ebefbe683fb4e1.tar.gz |
Intermediate changes
Diffstat (limited to 'contrib/python/Pygments/py3/pygments/lexers/lean.py')
-rw-r--r-- | contrib/python/Pygments/py3/pygments/lexers/lean.py | 145 |
1 file changed, 132 insertions(+), 13 deletions(-)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/lean.py b/contrib/python/Pygments/py3/pygments/lexers/lean.py index d16cd73c57..b44d2a0423 100644 --- a/contrib/python/Pygments/py3/pygments/lexers/lean.py +++ b/contrib/python/Pygments/py3/pygments/lexers/lean.py @@ -4,32 +4,38 @@ Lexers for the Lean theorem prover. - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ + import re -from pygments.lexer import RegexLexer, default, words, include -from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Generic, Whitespace +from pygments.lexer import RegexLexer, words, include +from pygments.token import Comment, Operator, Keyword, Name, String, \ + Number, Generic, Whitespace -__all__ = ['Lean3Lexer'] +__all__ = ['Lean3Lexer', 'Lean4Lexer'] class Lean3Lexer(RegexLexer): """ For the Lean 3 theorem prover. - - .. versionadded:: 2.0 """ name = 'Lean' url = 'https://leanprover-community.github.io/lean3' aliases = ['lean', 'lean3'] filenames = ['*.lean'] mimetypes = ['text/x-lean', 'text/x-lean3'] + version_added = '2.0' + + # from https://github.com/leanprover/vscode-lean/blob/1589ca3a65e394b3789409707febbd2d166c9344/syntaxes/lean.json#L186C20-L186C217 + _name_segment = ( + "(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟]" + "(?:(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟0-9'ⁿ-₉ₐ-ₜᵢ-ᵪ])*") + _name = _name_segment + r"(\." 
+ _name_segment + r")*" tokens = { 'expression': [ - (r'\s+', Text), + (r'\s+', Whitespace), (r'/--', String.Doc, 'docstring'), (r'/-', Comment, 'comment'), (r'--.*?$', Comment.Single), @@ -43,9 +49,8 @@ class Lean3Lexer(RegexLexer): (words(( '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',', )), Operator), - (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]' - r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079' - r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name), + (_name, Name), + (r'``?' + _name, String.Symbol), (r'0x[A-Za-z0-9]+', Number.Integer), (r'0b[01]+', Number.Integer), (r'\d+', Number.Integer), @@ -102,13 +107,13 @@ class Lean3Lexer(RegexLexer): include('expression'), ], 'comment': [ - (r'[^/-]', Comment.Multiline), + (r'[^/-]+', Comment.Multiline), (r'/-', Comment.Multiline, '#push'), (r'-/', Comment.Multiline, '#pop'), (r'[/-]', Comment.Multiline) ], 'docstring': [ - (r'[^/-]', String.Doc), + (r'[^/-]+', String.Doc), (r'-/', String.Doc, '#pop'), (r'[/-]', String.Doc) ], @@ -119,4 +124,118 @@ class Lean3Lexer(RegexLexer): ], } + def analyse_text(text): + if re.search(r'^import [a-z]', text, re.MULTILINE): + return 0.1 + + LeanLexer = Lean3Lexer + + +class Lean4Lexer(RegexLexer): + """ + For the Lean 4 theorem prover. + """ + + name = 'Lean4' + url = 'https://github.com/leanprover/lean4' + aliases = ['lean4'] + filenames = ['*.lean'] + mimetypes = ['text/x-lean4'] + version_added = '2.18' + + # same as Lean3Lexer, with `!` and `?` allowed + _name_segment = ( + "(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟]" + "(?:(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟0-9'ⁿ-₉ₐ-ₜᵢ-ᵪ!?])*") + _name = _name_segment + r"(\." 
+ _name_segment + r")*" + + keywords1 = ( + 'import', 'unif_hint', + 'renaming', 'inline', 'hiding', 'lemma', 'variable', + 'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias', + '#help', 'precedence', 'postfix', 'prefix', + 'infix', 'infixl', 'infixr', 'notation', '#eval', + '#check', '#reduce', '#exit', 'end', 'private', 'using', 'namespace', + 'instance', 'section', 'protected', + 'export', 'set_option', 'extends', 'open', 'example', + '#print', 'opaque', + 'def', 'macro', 'elab', 'syntax', 'macro_rules', '#reduce', 'where', + 'abbrev', 'noncomputable', 'class', 'attribute', '#synth', 'mutual', + 'scoped', 'local', + ) + + keywords2 = ( + 'forall', 'fun', 'obtain', 'from', 'have', 'show', 'assume', + 'let', 'if', 'else', 'then', 'by', 'in', 'with', + 'calc', 'match', 'nomatch', 'do', 'at', + ) + + keywords3 = ( + # Sorts + 'Type', 'Prop', 'Sort', + ) + + operators = ( + '!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!', + '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<', + '<-', '=', '==', '>', '_', '|', '||', '~', '=>', '<=', '>=', + '/\\', '\\/', '∀', 'Π', 'λ', '↔', '∧', '∨', '≠', '≤', '≥', + '¬', '⁻¹', '⬝', '▸', '→', '∃', '≈', '×', '⌞', + '⌟', '≡', '⟨', '⟩', "↦", + ) + + punctuation = ('(', ')', ':', '{', '}', '[', ']', '⦃', '⦄', + ':=', ',') + + tokens = { + 'expression': [ + (r'\s+', Whitespace), + (r'/--', String.Doc, 'docstring'), + (r'/-', Comment, 'comment'), + (r'--.*$', Comment.Single), + (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type), + (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error), + (words(operators), Name.Builtin.Pseudo), + (words(punctuation), Operator), + (_name_segment, Name), + (r'``?' 
+ _name, String.Symbol), + (r'(?<=\.)\d+', Number), + (r'(\d+\.\d*)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+', Number.Integer), + (r'"', String.Double, 'string'), + (r'[~?][a-z][\w\']*:', Name.Variable), + (r'\S', Name.Builtin.Pseudo), + ], + 'root': [ + (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace), + (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword), + (r'@\[', Keyword.Declaration, 'attribute'), + include('expression') + ], + 'attribute': [ + (r'\]', Keyword.Declaration, '#pop'), + include('expression'), + ], + 'comment': [ + # Multiline Comments + (r'[^/-]+', Comment.Multiline), + (r'/-', Comment.Multiline, '#push'), + (r'-/', Comment.Multiline, '#pop'), + (r'[/-]', Comment.Multiline) + ], + 'docstring': [ + (r'[^/-]+', String.Doc), + (r'-/', String.Doc, '#pop'), + (r'[/-]', String.Doc) + ], + 'string': [ + (r'[^\\"]+', String.Double), + (r'\\[n"\\\n]', String.Escape), + ('"', String.Double, '#pop'), + ], + } + + def analyse_text(text): + if re.search(r'^import [A-Z]', text, re.MULTILINE): + return 0.1 |