author     shadchin <shadchin@yandex-team.ru>  2022-02-10 16:44:30 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:44:30 +0300
commit     2598ef1d0aee359b4b6d5fdd1758916d5907d04f (patch)
tree       012bb94d777798f1f56ac1cec429509766d05181 /contrib/python/Pygments
parent     6751af0b0c1b952fede40b19b71da8025b5d8bcf (diff)
download   ydb-2598ef1d0aee359b4b6d5fdd1758916d5907d04f.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/python/Pygments')
-rw-r--r--  contrib/python/Pygments/py2/.dist-info/METADATA  8
-rw-r--r--  contrib/python/Pygments/py2/AUTHORS  456
-rw-r--r--  contrib/python/Pygments/py2/README.rst  88
-rw-r--r--  contrib/python/Pygments/py2/pygments/__init__.py  8
-rw-r--r--  contrib/python/Pygments/py2/pygments/__main__.py  36
-rw-r--r--  contrib/python/Pygments/py2/pygments/cmdline.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/__init__.py  4
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/html.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/img.py  16
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/other.py  14
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/rtf.py  4
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexer.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/__init__.py  18
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py  8
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py  28
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py  6
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py  136
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_mapping.py  28
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/asm.py  34
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/bibtex.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/c_cpp.py  12
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/c_like.py  54
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/configs.py  56
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/csound.py  72
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/data.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/dsls.py  276
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/dylan.py  16
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/elm.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/email.py  308
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/erlang.py  4
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/freefem.py  42
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/haskell.py  12
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/haxe.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/hdl.py  120
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/html.py  4
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/javascript.py  30
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/jvm.py  56
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/lisp.py  32
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/markup.py  6
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/matlab.py  96
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/mime.py  452
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ml.py  10
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/modeling.py  4
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/pascal.py  24
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/pawn.py  38
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/praat.py  74
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/prolog.py  48
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/python.py  660
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/rdf.py  294
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/resource.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/robotframework.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ruby.py  10
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/rust.py  12
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/scdoc.py  140
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/shell.py  46
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/slash.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/solidity.py  186
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/special.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/sql.py  216
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/templates.py  26
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/teraterm.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/textedit.py  6
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/textfmts.py  176
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/zig.py  258
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/__init__.py  2
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/inkpot.py  134
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/monokai.py  6
-rw-r--r--  contrib/python/Pygments/py2/ya.make  32
-rw-r--r--  contrib/python/Pygments/py3/.dist-info/METADATA  18
-rw-r--r--  contrib/python/Pygments/py3/AUTHORS  500
-rw-r--r--  contrib/python/Pygments/py3/LICENSE  2
-rw-r--r--  contrib/python/Pygments/py3/README.rst  88
-rw-r--r--  contrib/python/Pygments/py3/pygments/__init__.py  10
-rw-r--r--  contrib/python/Pygments/py3/pygments/__main__.py  34
-rw-r--r--  contrib/python/Pygments/py3/pygments/cmdline.py  428
-rw-r--r--  contrib/python/Pygments/py3/pygments/console.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/filter.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/filters/__init__.py  1218
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatter.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/__init__.py  16
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/_mapping.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/bbcode.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/html.py  460
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/img.py  182
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/irc.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/latex.py  124
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/other.py  36
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py  166
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/rtf.py  58
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/svg.py  80
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/terminal.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/terminal256.py  72
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexer.py  64
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/__init__.py  42
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py  10
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py  30
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py  32
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py  340
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py  802
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_mapping.py  232
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py  2562
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py  878
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py  146
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py  182
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py  224
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/actionscript.py  28
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/agile.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/algebra.py  42
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ambient.py  6
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/amdgpu.py  106
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ampl.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/apdlexer.py  890
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/apl.py  40
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/archetype.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/arrow.py  218
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/asc.py  100
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/asm.py  688
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/automation.py  16
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/bare.py  178
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/basic.py  26
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/bibtex.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/boa.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/business.py  40
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/c_cpp.py  338
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/c_like.py  262
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/capnproto.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/cddl.py  372
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/chapel.py  106
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/clean.py  28
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/compiled.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/configs.py  354
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/console.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/crystal.py  106
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/csound.py  102
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/css.py  28
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/d.py  24
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dalvik.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/data.py  460
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/devicetree.py  206
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/diff.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dotnet.py  68
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dsls.py  274
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dylan.py  26
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ecl.py  32
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/eiffel.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/elm.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/email.py  300
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/erlang.py  56
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/esoteric.py  88
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ezhil.py  42
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/factor.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/fantom.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/felix.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/floscript.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/forth.py  76
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/fortran.py  26
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/foxpro.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/freefem.py  44
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/functional.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/futhark.py  212
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py  70
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/gdscript.py  670
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/go.py  6
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py  120
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/graph.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/graphics.py  46
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/graphviz.py  116
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/gsql.py  146
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/haskell.py  30
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/haxe.py  10
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/hdl.py  348
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/hexdump.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/html.py  42
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/idl.py  26
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/igor.py  10
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/inferno.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/installers.py  6
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/int_fiction.py  78
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/iolang.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/j.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/javascript.py  432
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/jslt.py  184
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/julia.py  290
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/jvm.py  954
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/kuin.py  594
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/lisp.py  1650
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/make.py  20
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/markup.py  424
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/math.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/matlab.py  5466
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/meson.py  308
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/mime.py  420
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ml.py  406
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/modeling.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/modula2.py  54
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/monte.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/mosel.py  894
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ncl.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/nimrod.py  10
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/nit.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/nix.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/oberon.py  34
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/objective.py  12
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ooc.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/other.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/parasail.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/parsers.py  84
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/pascal.py  34
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/pawn.py  54
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/perl.py  504
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/php.py  124
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/pointless.py  140
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/pony.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/praat.py  76
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/procfile.py  86
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/prolog.py  74
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/promql.py  364
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/python.py  920
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/qvt.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/r.py  12
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rdf.py  444
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rebol.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/resource.py  6
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ride.py  276
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rnc.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/roboconf.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/robotframework.py  44
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ruby.py  90
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rust.py  172
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/sas.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/scdoc.py  164
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/scripting.py  160
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/sgf.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/shell.py  250
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/sieve.py  136
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/slash.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/smalltalk.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/smithy.py  158
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/smv.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/snobol.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/solidity.py  182
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/special.py  92
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/sql.py  550
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/stata.py  18
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/supercollider.py  18
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/tcl.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/teal.py  174
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/templates.py  226
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/teraterm.py  468
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/testing.py  14
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/text.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/textedit.py  12
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/textfmts.py  282
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/theorem.py  162
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/thingsdb.py  228
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/tnt.py  544
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/trafficscript.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/typoscript.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/unicon.py  68
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/urbi.py  32
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/usd.py  178
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/varnish.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/verification.py  26
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/web.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/webassembly.py  234
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/webidl.py  596
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/webmisc.py  56
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/whiley.py  10
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/x10.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/xorg.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/yang.py  206
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/zig.py  246
-rw-r--r--  contrib/python/Pygments/py3/pygments/modeline.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/plugin.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/regexopt.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/scanner.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/sphinxext.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/style.py  30
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/__init__.py  16
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/abap.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/algol.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/algol_nu.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/arduino.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/autumn.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/borland.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/bw.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/colorful.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/default.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/emacs.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/friendly.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/fruity.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/gruvbox.py  212
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/igor.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/inkpot.py  132
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/lovelace.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/manni.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/material.py  236
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/monokai.py  8
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/murphy.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/native.py  4
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/paraiso_light.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/pastie.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/perldoc.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/rrt.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/sas.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/solarized.py  16
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/stata_dark.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/stata_light.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/tango.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/trac.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/vim.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/vs.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/xcode.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/zenburn.py  160
-rw-r--r--  contrib/python/Pygments/py3/pygments/token.py  2
-rw-r--r--  contrib/python/Pygments/py3/pygments/unistring.py  92
-rw-r--r--  contrib/python/Pygments/py3/pygments/util.py  32
-rw-r--r--  contrib/python/Pygments/py3/ya.make  102
-rw-r--r--  contrib/python/Pygments/ya.make  20
333 files changed, 22133 insertions, 22133 deletions
diff --git a/contrib/python/Pygments/py2/.dist-info/METADATA b/contrib/python/Pygments/py2/.dist-info/METADATA
index 4214b2f67c..2741a0de96 100644
--- a/contrib/python/Pygments/py2/.dist-info/METADATA
+++ b/contrib/python/Pygments/py2/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: Pygments
-Version: 2.5.2
+Version: 2.5.2
Summary: Pygments is a syntax highlighting package written in Python.
Home-page: http://pygments.org/
Author: Georg Brandl
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Operating System :: OS Independent
Classifier: Topic :: Text Processing :: Filters
Classifier: Topic :: Utilities
diff --git a/contrib/python/Pygments/py2/AUTHORS b/contrib/python/Pygments/py2/AUTHORS
index f7a7acadf6..ee02af7311 100644
--- a/contrib/python/Pygments/py2/AUTHORS
+++ b/contrib/python/Pygments/py2/AUTHORS
@@ -1,228 +1,228 @@
-Pygments is written and maintained by Georg Brandl <georg@python.org>.
-
-Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher
-<armin.ronacher@active-4.com>.
-
-Other contributors, listed alphabetically, are:
-
-* Sam Aaron -- Ioke lexer
-* Ali Afshar -- image formatter
-* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript
- lexers
-* Muthiah Annamalai -- Ezhil lexer
-* Kumar Appaiah -- Debian control lexer
-* Andreas Amann -- AppleScript lexer
-* Timothy Armstrong -- Dart lexer fixes
-* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
-* Jeremy Ashkenas -- CoffeeScript lexer
-* José Joaquín Atria -- Praat lexer
-* Stefan Matthias Aust -- Smalltalk lexer
-* Lucas Bajolet -- Nit lexer
-* Ben Bangert -- Mako lexers
-* Max Battcher -- Darcs patch lexer
-* Thomas Baruchel -- APL lexer
-* Tim Baumann -- (Literate) Agda lexer
-* Paul Baumgart, 280 North, Inc. -- Objective-J lexer
-* Michael Bayer -- Myghty lexers
-* Thomas Beale -- Archetype lexers
-* John Benediktsson -- Factor lexer
-* Trevor Bergeron -- mIRC formatter
-* Vincent Bernat -- LessCSS lexer
-* Christopher Bertels -- Fancy lexer
-* Sébastien Bigaret -- QVT Operational lexer
-* Jarrett Billingsley -- MiniD lexer
-* Adam Blinkinsop -- Haskell, Redcode lexers
-* Stéphane Blondon -- SGF lexer
-* Frits van Bommel -- assembler lexers
-* Pierre Bourdon -- bugfixes
-* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
-* chebee7i -- Python traceback lexer improvements
-* Hiram Chirino -- Scaml and Jade lexers
-* Mauricio Caceres -- SAS and Stata lexers.
-* Ian Cooper -- VGL lexer
-* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
-* Leaf Corcoran -- MoonScript lexer
-* Christopher Creutzig -- MuPAD lexer
-* Daniël W. Crompton -- Pike lexer
-* Pete Curry -- bugfixes
-* Bryan Davis -- EBNF lexer
-* Bruno Deferrari -- Shen lexer
-* Giedrius Dubinskas -- HTML formatter improvements
-* Owen Durni -- Haxe lexer
-* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer
-* James Edwards -- Terraform lexer
-* Nick Efford -- Python 3 lexer
-* Sven Efftinge -- Xtend lexer
-* Artem Egorkine -- terminal256 formatter
-* Matthew Fernandez -- CAmkES lexer
-* Michael Ficarra -- CPSA lexer
-* James H. Fisher -- PostScript lexer
-* William S. Fulton -- SWIG lexer
-* Carlos Galdino -- Elixir and Elixir Console lexers
-* Michael Galloy -- IDL lexer
-* Naveen Garg -- Autohotkey lexer
-* Laurent Gautier -- R/S lexer
-* Alex Gaynor -- PyPy log lexer
-* Richard Gerkin -- Igor Pro lexer
-* Alain Gilbert -- TypeScript lexer
-* Alex Gilding -- BlitzBasic lexer
-* Bertrand Goetzmann -- Groovy lexer
-* Krzysiek Goj -- Scala lexer
-* Andrey Golovizin -- BibTeX lexers
-* Matt Good -- Genshi, Cheetah lexers
-* Michał Górny -- vim modeline support
-* Alex Gosse -- TrafficScript lexer
-* Patrick Gotthardt -- PHP namespaces support
-* Olivier Guibe -- Asymptote lexer
-* Phil Hagelberg -- Fennel lexer
-* Florian Hahn -- Boogie lexer
-* Martin Harriman -- SNOBOL lexer
-* Matthew Harrison -- SVG formatter
-* Steven Hazel -- Tcl lexer
-* Dan Michael Heggø -- Turtle lexer
-* Aslak Hellesøy -- Gherkin lexer
-* Greg Hendershott -- Racket lexer
-* Justin Hendrick -- ParaSail lexer
-* Jordi Gutiérrez Hermoso -- Octave lexer
-* David Hess, Fish Software, Inc. -- Objective-J lexer
-* Varun Hiremath -- Debian control lexer
-* Rob Hoelz -- Perl 6 lexer
-* Doug Hogan -- Mscgen lexer
-* Ben Hollis -- Mason lexer
-* Max Horn -- GAP lexer
-* Alastair Houghton -- Lexer inheritance facility
-* Tim Howard -- BlitzMax lexer
-* Dustin Howett -- Logos lexer
-* Ivan Inozemtsev -- Fantom lexer
-* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session,
- MSDOS session, BC, WDiff
-* Brian R. Jackson -- Tea lexer
-* Christian Jann -- ShellSession lexer
-* Dennis Kaarsemaker -- sources.list lexer
-* Dmitri Kabak -- Inferno Limbo lexer
-* Igor Kalnitsky -- vhdl lexer
-* Alexander Kit -- MaskJS lexer
-* Pekka Klärck -- Robot Framework lexer
-* Gerwin Klein -- Isabelle lexer
-* Eric Knibbe -- Lasso lexer
-* Stepan Koltsov -- Clay lexer
-* Adam Koprowski -- Opa lexer
-* Benjamin Kowarsch -- Modula-2 lexer
-* Domen Kožar -- Nix lexer
-* Oleh Krekel -- Emacs Lisp lexer
-* Alexander Kriegisch -- Kconfig and AspectJ lexers
-* Marek Kubica -- Scheme lexer
-* Jochen Kupperschmidt -- Markdown processor
-* Gerd Kurzbach -- Modelica lexer
-* Jon Larimer, Google Inc. -- Smali lexer
-* Olov Lassus -- Dart lexer
-* Matt Layman -- TAP lexer
-* Kristian Lyngstøl -- Varnish lexers
-* Sylvestre Ledru -- Scilab lexer
-* Chee Sing Lee -- Flatline lexer
-* Mark Lee -- Vala lexer
-* Valentin Lorentz -- C++ lexer improvements
-* Ben Mabey -- Gherkin lexer
-* Angus MacArthur -- QML lexer
-* Louis Mandel -- X10 lexer
-* Louis Marchand -- Eiffel lexer
-* Simone Margaritelli -- Hybris lexer
-* Kirk McDonald -- D lexer
-* Gordon McGregor -- SystemVerilog lexer
-* Stephen McKamey -- Duel/JBST lexer
-* Brian McKenna -- F# lexer
-* Charles McLaughlin -- Puppet lexer
-* Kurt McKee -- Tera Term macro lexer
-* Lukas Meuser -- BBCode formatter, Lua lexer
-* Cat Miller -- Pig lexer
-* Paul Miller -- LiveScript lexer
-* Hong Minhee -- HTTP lexer
-* Michael Mior -- Awk lexer
-* Bruce Mitchener -- Dylan lexer rewrite
-* Reuben Morais -- SourcePawn lexer
-* Jon Morton -- Rust lexer
-* Paulo Moura -- Logtalk lexer
-* Mher Movsisyan -- DTD lexer
-* Dejan Muhamedagic -- Crmsh lexer
-* Ana Nelson -- Ragel, ANTLR, R console lexers
-* Kurt Neufeld -- Markdown lexer
-* Nam T. Nguyen -- Monokai style
-* Jesper Noehr -- HTML formatter "anchorlinenos"
-* Mike Nolta -- Julia lexer
-* Jonas Obrist -- BBCode lexer
-* Edward O'Callaghan -- Cryptol lexer
-* David Oliva -- Rebol lexer
-* Pat Pannuto -- nesC lexer
-* Jon Parise -- Protocol buffers and Thrift lexers
-* Benjamin Peterson -- Test suite refactoring
-* Ronny Pfannschmidt -- BBCode lexer
-* Dominik Picheta -- Nimrod lexer
-* Andrew Pinkham -- RTF Formatter Refactoring
-* Clément Prévost -- UrbiScript lexer
-* Tanner Prynn -- cmdline -x option and loading lexers from files
-* Oleh Prypin -- Crystal lexer (based on Ruby lexer)
-* Elias Rabel -- Fortran fixed form lexer
-* raichoo -- Idris lexer
-* Kashif Rasul -- CUDA lexer
-* Nathan Reed -- HLSL lexer
-* Justin Reidy -- MXML lexer
-* Norman Richards -- JSON lexer
-* Corey Richardson -- Rust lexer updates
-* Lubomir Rintel -- GoodData MAQL and CL lexers
-* Andre Roberge -- Tango style
-* Georg Rollinger -- HSAIL lexer
-* Michiel Roos -- TypoScript lexer
-* Konrad Rudolph -- LaTeX formatter enhancements
-* Mario Ruggier -- Evoque lexers
-* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements
-* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
-* Matteo Sasso -- Common Lisp lexer
-* Joe Schafer -- Ada lexer
-* Ken Schutte -- Matlab lexers
-* René Schwaiger -- Rainbow Dash style
-* Sebastian Schweizer -- Whiley lexer
-* Tassilo Schweyer -- Io, MOOCode lexers
-* Ted Shaw -- AutoIt lexer
-* Joerg Sieker -- ABAP lexer
-* Robert Simmons -- Standard ML lexer
-* Kirill Simonov -- YAML lexer
-* Corbin Simpson -- Monte lexer
-* Alexander Smishlajev -- Visual FoxPro lexer
-* Steve Spigarelli -- XQuery lexer
-* Jerome St-Louis -- eC lexer
-* Camil Staps -- Clean and NuSMV lexers; Solarized style
-* James Strachan -- Kotlin lexer
-* Tom Stuart -- Treetop lexer
-* Colin Sullivan -- SuperCollider lexer
-* Ben Swift -- Extempore lexer
-* Edoardo Tenani -- Arduino lexer
-* Tiberius Teng -- default style overhaul
-* Jeremy Thurgood -- Erlang, Squid config lexers
-* Brian Tiffin -- OpenCOBOL lexer
-* Bob Tolbert -- Hy lexer
-* Matthias Trute -- Forth lexer
-* Erick Tryzelaar -- Felix lexer
-* Alexander Udalov -- Kotlin lexer improvements
-* Thomas Van Doren -- Chapel lexer
-* Daniele Varrazzo -- PostgreSQL lexers
-* Abe Voelker -- OpenEdge ABL lexer
-* Pepijn de Vos -- HTML formatter CTags support
-* Matthias Vallentin -- Bro lexer
-* Benoît Vinot -- AMPL lexer
-* Linh Vu Hong -- RSL lexer
-* Nathan Weizenbaum -- Haml and Sass lexers
-* Nathan Whetsell -- Csound lexers
-* Dietmar Winkler -- Modelica lexer
-* Nils Winter -- Smalltalk lexer
-* Davy Wybiral -- Clojure lexer
-* Whitney Young -- ObjectiveC lexer
-* Diego Zamboni -- CFengine3 lexer
-* Enrique Zamudio -- Ceylon lexer
-* Alex Zimin -- Nemerle lexer
-* Rob Zimmerman -- Kal lexer
-* Vincent Zurczak -- Roboconf lexer
-* Rostyslav Golda -- FloScript lexer
-* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers
-* Simon Garnotel -- FreeFem++ lexer
-
-Many thanks for all contributions!
+Pygments is written and maintained by Georg Brandl <georg@python.org>.
+
+Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher
+<armin.ronacher@active-4.com>.
+
+Other contributors, listed alphabetically, are:
+
+* Sam Aaron -- Ioke lexer
+* Ali Afshar -- image formatter
+* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript
+ lexers
+* Muthiah Annamalai -- Ezhil lexer
+* Kumar Appaiah -- Debian control lexer
+* Andreas Amann -- AppleScript lexer
+* Timothy Armstrong -- Dart lexer fixes
+* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
+* Jeremy Ashkenas -- CoffeeScript lexer
+* José Joaquín Atria -- Praat lexer
+* Stefan Matthias Aust -- Smalltalk lexer
+* Lucas Bajolet -- Nit lexer
+* Ben Bangert -- Mako lexers
+* Max Battcher -- Darcs patch lexer
+* Thomas Baruchel -- APL lexer
+* Tim Baumann -- (Literate) Agda lexer
+* Paul Baumgart, 280 North, Inc. -- Objective-J lexer
+* Michael Bayer -- Myghty lexers
+* Thomas Beale -- Archetype lexers
+* John Benediktsson -- Factor lexer
+* Trevor Bergeron -- mIRC formatter
+* Vincent Bernat -- LessCSS lexer
+* Christopher Bertels -- Fancy lexer
+* Sébastien Bigaret -- QVT Operational lexer
+* Jarrett Billingsley -- MiniD lexer
+* Adam Blinkinsop -- Haskell, Redcode lexers
+* Stéphane Blondon -- SGF lexer
+* Frits van Bommel -- assembler lexers
+* Pierre Bourdon -- bugfixes
+* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
+* chebee7i -- Python traceback lexer improvements
+* Hiram Chirino -- Scaml and Jade lexers
+* Mauricio Caceres -- SAS and Stata lexers.
+* Ian Cooper -- VGL lexer
+* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
+* Leaf Corcoran -- MoonScript lexer
+* Christopher Creutzig -- MuPAD lexer
+* Daniël W. Crompton -- Pike lexer
+* Pete Curry -- bugfixes
+* Bryan Davis -- EBNF lexer
+* Bruno Deferrari -- Shen lexer
+* Giedrius Dubinskas -- HTML formatter improvements
+* Owen Durni -- Haxe lexer
+* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer
+* James Edwards -- Terraform lexer
+* Nick Efford -- Python 3 lexer
+* Sven Efftinge -- Xtend lexer
+* Artem Egorkine -- terminal256 formatter
+* Matthew Fernandez -- CAmkES lexer
+* Michael Ficarra -- CPSA lexer
+* James H. Fisher -- PostScript lexer
+* William S. Fulton -- SWIG lexer
+* Carlos Galdino -- Elixir and Elixir Console lexers
+* Michael Galloy -- IDL lexer
+* Naveen Garg -- Autohotkey lexer
+* Laurent Gautier -- R/S lexer
+* Alex Gaynor -- PyPy log lexer
+* Richard Gerkin -- Igor Pro lexer
+* Alain Gilbert -- TypeScript lexer
+* Alex Gilding -- BlitzBasic lexer
+* Bertrand Goetzmann -- Groovy lexer
+* Krzysiek Goj -- Scala lexer
+* Andrey Golovizin -- BibTeX lexers
+* Matt Good -- Genshi, Cheetah lexers
+* Michał Górny -- vim modeline support
+* Alex Gosse -- TrafficScript lexer
+* Patrick Gotthardt -- PHP namespaces support
+* Olivier Guibe -- Asymptote lexer
+* Phil Hagelberg -- Fennel lexer
+* Florian Hahn -- Boogie lexer
+* Martin Harriman -- SNOBOL lexer
+* Matthew Harrison -- SVG formatter
+* Steven Hazel -- Tcl lexer
+* Dan Michael Heggø -- Turtle lexer
+* Aslak Hellesøy -- Gherkin lexer
+* Greg Hendershott -- Racket lexer
+* Justin Hendrick -- ParaSail lexer
+* Jordi Gutiérrez Hermoso -- Octave lexer
+* David Hess, Fish Software, Inc. -- Objective-J lexer
+* Varun Hiremath -- Debian control lexer
+* Rob Hoelz -- Perl 6 lexer
+* Doug Hogan -- Mscgen lexer
+* Ben Hollis -- Mason lexer
+* Max Horn -- GAP lexer
+* Alastair Houghton -- Lexer inheritance facility
+* Tim Howard -- BlitzMax lexer
+* Dustin Howett -- Logos lexer
+* Ivan Inozemtsev -- Fantom lexer
+* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session,
+ MSDOS session, BC, WDiff
+* Brian R. Jackson -- Tea lexer
+* Christian Jann -- ShellSession lexer
+* Dennis Kaarsemaker -- sources.list lexer
+* Dmitri Kabak -- Inferno Limbo lexer
+* Igor Kalnitsky -- vhdl lexer
+* Alexander Kit -- MaskJS lexer
+* Pekka Klärck -- Robot Framework lexer
+* Gerwin Klein -- Isabelle lexer
+* Eric Knibbe -- Lasso lexer
+* Stepan Koltsov -- Clay lexer
+* Adam Koprowski -- Opa lexer
+* Benjamin Kowarsch -- Modula-2 lexer
+* Domen Kožar -- Nix lexer
+* Oleh Krekel -- Emacs Lisp lexer
+* Alexander Kriegisch -- Kconfig and AspectJ lexers
+* Marek Kubica -- Scheme lexer
+* Jochen Kupperschmidt -- Markdown processor
+* Gerd Kurzbach -- Modelica lexer
+* Jon Larimer, Google Inc. -- Smali lexer
+* Olov Lassus -- Dart lexer
+* Matt Layman -- TAP lexer
+* Kristian Lyngstøl -- Varnish lexers
+* Sylvestre Ledru -- Scilab lexer
+* Chee Sing Lee -- Flatline lexer
+* Mark Lee -- Vala lexer
+* Valentin Lorentz -- C++ lexer improvements
+* Ben Mabey -- Gherkin lexer
+* Angus MacArthur -- QML lexer
+* Louis Mandel -- X10 lexer
+* Louis Marchand -- Eiffel lexer
+* Simone Margaritelli -- Hybris lexer
+* Kirk McDonald -- D lexer
+* Gordon McGregor -- SystemVerilog lexer
+* Stephen McKamey -- Duel/JBST lexer
+* Brian McKenna -- F# lexer
+* Charles McLaughlin -- Puppet lexer
+* Kurt McKee -- Tera Term macro lexer
+* Lukas Meuser -- BBCode formatter, Lua lexer
+* Cat Miller -- Pig lexer
+* Paul Miller -- LiveScript lexer
+* Hong Minhee -- HTTP lexer
+* Michael Mior -- Awk lexer
+* Bruce Mitchener -- Dylan lexer rewrite
+* Reuben Morais -- SourcePawn lexer
+* Jon Morton -- Rust lexer
+* Paulo Moura -- Logtalk lexer
+* Mher Movsisyan -- DTD lexer
+* Dejan Muhamedagic -- Crmsh lexer
+* Ana Nelson -- Ragel, ANTLR, R console lexers
+* Kurt Neufeld -- Markdown lexer
+* Nam T. Nguyen -- Monokai style
+* Jesper Noehr -- HTML formatter "anchorlinenos"
+* Mike Nolta -- Julia lexer
+* Jonas Obrist -- BBCode lexer
+* Edward O'Callaghan -- Cryptol lexer
+* David Oliva -- Rebol lexer
+* Pat Pannuto -- nesC lexer
+* Jon Parise -- Protocol buffers and Thrift lexers
+* Benjamin Peterson -- Test suite refactoring
+* Ronny Pfannschmidt -- BBCode lexer
+* Dominik Picheta -- Nimrod lexer
+* Andrew Pinkham -- RTF Formatter Refactoring
+* Clément Prévost -- UrbiScript lexer
+* Tanner Prynn -- cmdline -x option and loading lexers from files
+* Oleh Prypin -- Crystal lexer (based on Ruby lexer)
+* Elias Rabel -- Fortran fixed form lexer
+* raichoo -- Idris lexer
+* Kashif Rasul -- CUDA lexer
+* Nathan Reed -- HLSL lexer
+* Justin Reidy -- MXML lexer
+* Norman Richards -- JSON lexer
+* Corey Richardson -- Rust lexer updates
+* Lubomir Rintel -- GoodData MAQL and CL lexers
+* Andre Roberge -- Tango style
+* Georg Rollinger -- HSAIL lexer
+* Michiel Roos -- TypoScript lexer
+* Konrad Rudolph -- LaTeX formatter enhancements
+* Mario Ruggier -- Evoque lexers
+* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements
+* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
+* Matteo Sasso -- Common Lisp lexer
+* Joe Schafer -- Ada lexer
+* Ken Schutte -- Matlab lexers
+* René Schwaiger -- Rainbow Dash style
+* Sebastian Schweizer -- Whiley lexer
+* Tassilo Schweyer -- Io, MOOCode lexers
+* Ted Shaw -- AutoIt lexer
+* Joerg Sieker -- ABAP lexer
+* Robert Simmons -- Standard ML lexer
+* Kirill Simonov -- YAML lexer
+* Corbin Simpson -- Monte lexer
+* Alexander Smishlajev -- Visual FoxPro lexer
+* Steve Spigarelli -- XQuery lexer
+* Jerome St-Louis -- eC lexer
+* Camil Staps -- Clean and NuSMV lexers; Solarized style
+* James Strachan -- Kotlin lexer
+* Tom Stuart -- Treetop lexer
+* Colin Sullivan -- SuperCollider lexer
+* Ben Swift -- Extempore lexer
+* Edoardo Tenani -- Arduino lexer
+* Tiberius Teng -- default style overhaul
+* Jeremy Thurgood -- Erlang, Squid config lexers
+* Brian Tiffin -- OpenCOBOL lexer
+* Bob Tolbert -- Hy lexer
+* Matthias Trute -- Forth lexer
+* Erick Tryzelaar -- Felix lexer
+* Alexander Udalov -- Kotlin lexer improvements
+* Thomas Van Doren -- Chapel lexer
+* Daniele Varrazzo -- PostgreSQL lexers
+* Abe Voelker -- OpenEdge ABL lexer
+* Pepijn de Vos -- HTML formatter CTags support
+* Matthias Vallentin -- Bro lexer
+* Benoît Vinot -- AMPL lexer
+* Linh Vu Hong -- RSL lexer
+* Nathan Weizenbaum -- Haml and Sass lexers
+* Nathan Whetsell -- Csound lexers
+* Dietmar Winkler -- Modelica lexer
+* Nils Winter -- Smalltalk lexer
+* Davy Wybiral -- Clojure lexer
+* Whitney Young -- ObjectiveC lexer
+* Diego Zamboni -- CFengine3 lexer
+* Enrique Zamudio -- Ceylon lexer
+* Alex Zimin -- Nemerle lexer
+* Rob Zimmerman -- Kal lexer
+* Vincent Zurczak -- Roboconf lexer
+* Rostyslav Golda -- FloScript lexer
+* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers
+* Simon Garnotel -- FreeFem++ lexer
+
+Many thanks for all contributions!
diff --git a/contrib/python/Pygments/py2/README.rst b/contrib/python/Pygments/py2/README.rst
index ef0cbfd248..9d59609bcd 100644
--- a/contrib/python/Pygments/py2/README.rst
+++ b/contrib/python/Pygments/py2/README.rst
@@ -1,44 +1,44 @@
-Welcome to Pygments
-===================
-
-This is the source of Pygments. It is a **generic syntax highlighter** written
-in Python that supports over 300 languages and text formats, for use in code
-hosting, forums, wikis or other applications that need to prettify source code.
-
-Installing
-----------
-
-... works as usual, use ``pip install Pygments`` to get published versions,
-or ``python setup.py install`` to install from a checkout.
-
-Documentation
--------------
-
-... can be found online at http://pygments.org/ or created with Sphinx by ::
-
- cd doc
- make html
-
-Development
------------
-
-... takes place on `GitHub <https://github.com/pygments/pygments>`_, where the
-Git repository, tickets and pull requests can be viewed.
-
-Continuous testing runs on GitHub workflows:
-
-.. image:: https://github.com/pygments/pygments/workflows/Pygments/badge.svg
- :target: https://github.com/pygments/pygments/actions?query=workflow%3APygments
-
-The authors
------------
-
-Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*
-and **Matthäus Chajdas**.
-
-Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
-the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
-
-The code is distributed under the BSD 2-clause license. Contributors making pull
-requests must agree that they are able and willing to put their contributions
-under that license.
+Welcome to Pygments
+===================
+
+This is the source of Pygments. It is a **generic syntax highlighter** written
+in Python that supports over 300 languages and text formats, for use in code
+hosting, forums, wikis or other applications that need to prettify source code.
+
+Installing
+----------
+
+... works as usual, use ``pip install Pygments`` to get published versions,
+or ``python setup.py install`` to install from a checkout.
+
+Documentation
+-------------
+
+... can be found online at http://pygments.org/ or created with Sphinx by ::
+
+ cd doc
+ make html
+
+Development
+-----------
+
+... takes place on `GitHub <https://github.com/pygments/pygments>`_, where the
+Git repository, tickets and pull requests can be viewed.
+
+Continuous testing runs on GitHub workflows:
+
+.. image:: https://github.com/pygments/pygments/workflows/Pygments/badge.svg
+ :target: https://github.com/pygments/pygments/actions?query=workflow%3APygments
+
+The authors
+-----------
+
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*
+and **Matthäus Chajdas**.
+
+Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
+the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
+
+The code is distributed under the BSD 2-clause license. Contributors making pull
+requests must agree that they are able and willing to put their contributions
+under that license.
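
The README hunk above only re-adds the installation and documentation notes unchanged. For reference, a minimal smoke test of an installed Pygments, assuming the standard pip install the README describes (the snippet is illustrative and not part of the patch):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import TerminalFormatter

    # Highlight a one-liner and print it with ANSI escape sequences.
    print(highlight('print("hello, world")', PythonLexer(), TerminalFormatter()))
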
diff --git a/contrib/python/Pygments/py2/pygments/__init__.py b/contrib/python/Pygments/py2/pygments/__init__.py
index 89efc350ee..4b2b62f086 100644
--- a/contrib/python/Pygments/py2/pygments/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/__init__.py
@@ -17,10 +17,10 @@
* it is usable as a command-line tool and as a library
* ... and it highlights even Brainfuck!
- The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
+ The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
- .. _Pygments master branch:
- https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
+ .. _Pygments master branch:
+ https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
@@ -29,7 +29,7 @@ import sys
from pygments.util import StringIO, BytesIO
-__version__ = '2.5.2'
+__version__ = '2.5.2'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
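
The package keeps exporting the same three helpers listed in ``__all__``. A rough sketch of how they relate under this 2.5.x API, with highlight() being the one-shot form of lex() plus format():

    from pygments import lex, format, highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'print("hello")\n'

    # One-shot helper ...
    html = highlight(code, PythonLexer(), HtmlFormatter())

    # ... is equivalent to tokenizing and formatting in two steps.
    tokens = lex(code, PythonLexer())
    assert format(tokens, HtmlFormatter()) == html
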
diff --git a/contrib/python/Pygments/py2/pygments/__main__.py b/contrib/python/Pygments/py2/pygments/__main__.py
index af231b3d63..6a0376081a 100644
--- a/contrib/python/Pygments/py2/pygments/__main__.py
+++ b/contrib/python/Pygments/py2/pygments/__main__.py
@@ -1,18 +1,18 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.__main__
- ~~~~~~~~~~~~~~~~~
-
- Main entry point for ``python -m pygments``.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-import pygments.cmdline
-
-try:
- sys.exit(pygments.cmdline.main(sys.argv))
-except KeyboardInterrupt:
- sys.exit(1)
+# -*- coding: utf-8 -*-
+"""
+ pygments.__main__
+ ~~~~~~~~~~~~~~~~~
+
+ Main entry point for ``python -m pygments``.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+import pygments.cmdline
+
+try:
+ sys.exit(pygments.cmdline.main(sys.argv))
+except KeyboardInterrupt:
+ sys.exit(1)
diff --git a/contrib/python/Pygments/py2/pygments/cmdline.py b/contrib/python/Pygments/py2/pygments/cmdline.py
index 34752d66b2..c45eb69805 100644
--- a/contrib/python/Pygments/py2/pygments/cmdline.py
+++ b/contrib/python/Pygments/py2/pygments/cmdline.py
@@ -554,7 +554,7 @@ def main(args=sys.argv):
file=sys.stderr)
print('Please report the whole traceback to the issue tracker at',
file=sys.stderr)
- print('<https://github.com/pygments/pygments/issues>.',
+ print('<https://github.com/pygments/pygments/issues>.',
file=sys.stderr)
print('*' * 65, file=sys.stderr)
print(file=sys.stderr)
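
The error handler patched above sits inside pygments.cmdline.main(), which can also be driven programmatically. A hedged sketch (the file name is a placeholder; argv[0] is only used as the program name):

    import sys
    import pygments.cmdline

    # Roughly equivalent to `python -m pygments -l python -f terminal some_script.py`.
    rc = pygments.cmdline.main(
        ["pygments", "-l", "python", "-f", "terminal", "some_script.py"])
    sys.exit(rc)
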
diff --git a/contrib/python/Pygments/py2/pygments/formatters/__init__.py b/contrib/python/Pygments/py2/pygments/formatters/__init__.py
index 6f1130a801..329a21c0e4 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/__init__.py
@@ -108,8 +108,8 @@ def load_formatter_from_file(filename, formattername="CustomFormatter",
# And finally instantiate it with the options
return formatter_class(**options)
except IOError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
- except ClassNotFound:
+ raise ClassNotFound('cannot read %s: %s' % (filename, err))
+ except ClassNotFound:
raise
except Exception as err:
raise ClassNotFound('error when loading custom formatter: %s' % err)
diff --git a/contrib/python/Pygments/py2/pygments/formatters/html.py b/contrib/python/Pygments/py2/pygments/formatters/html.py
index 042f04cfb1..9c8bff0547 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/html.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/html.py
@@ -435,7 +435,7 @@ class HtmlFormatter(Formatter):
self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.lineseparator = options.get('lineseparator', u'\n')
+ self.lineseparator = options.get('lineseparator', u'\n')
self.lineanchors = options.get('lineanchors', '')
self.linespans = options.get('linespans', '')
self.anchorlinenos = options.get('anchorlinenos', False)
diff --git a/contrib/python/Pygments/py2/pygments/formatters/img.py b/contrib/python/Pygments/py2/pygments/formatters/img.py
index 6bb3364458..d1737a7a74 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/img.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/img.py
@@ -46,9 +46,9 @@ STYLES = {
}
# A sane default for modern systems
-DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
+DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
DEFAULT_FONT_NAME_WIN = 'Courier New'
-DEFAULT_FONT_NAME_MAC = 'Menlo'
+DEFAULT_FONT_NAME_MAC = 'Menlo'
class PilNotAvailable(ImportError):
@@ -125,8 +125,8 @@ class FontManager(object):
for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
'/Library/Fonts/', '/System/Library/Fonts/'):
font_map.update(
- (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
- for f in os.listdir(font_dir) if f.lower().endswith('ttf'))
+ (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
+ for f in os.listdir(font_dir) if f.lower().endswith('ttf'))
for name in STYLES['NORMAL']:
path = self._get_mac_font_path(font_map, self.font_name, name)
@@ -237,8 +237,8 @@ class ImageFormatter(Formatter):
bold and italic fonts will be generated. This really should be a
monospace font to look sane.
- Default: "Courier New" on Windows, "Menlo" on Mac OS, and
- "DejaVu Sans Mono" on \\*nix
+ Default: "Courier New" on Windows, "Menlo" on Mac OS, and
+ "DejaVu Sans Mono" on \\*nix
`font_size`
The font size in points to be used.
@@ -522,8 +522,8 @@ class ImageFormatter(Formatter):
rectw = self.image_pad + self.line_number_width - self.line_number_pad
draw.rectangle([(0, 0), (rectw, recth)],
fill=self.line_number_bg)
- if self.line_number_separator:
- draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
+ if self.line_number_separator:
+ draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
del draw
def format(self, tokensource, outfile):
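
The img.py hunk above only touches the documented font_name defaults. For context, a sketch of the ImageFormatter in use (it requires Pillow; the font name is just the *nix default quoted above, and any installed TrueType face should work):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import ImageFormatter

    formatter = ImageFormatter(font_name='DejaVu Sans Mono', font_size=14,
                               line_numbers=True)
    with open('out.png', 'wb') as fh:
        # The formatter emits binary PNG data, so write through a binary file.
        highlight('print("hello")', PythonLexer(), formatter, outfile=fh)
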
diff --git a/contrib/python/Pygments/py2/pygments/formatters/other.py b/contrib/python/Pygments/py2/pygments/formatters/other.py
index c09eff0cb4..f3fe919a50 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/other.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/other.py
@@ -10,7 +10,7 @@
"""
from pygments.formatter import Formatter
-from pygments.util import get_choice_opt
+from pygments.util import get_choice_opt
from pygments.token import Token
from pygments.console import colorize
@@ -87,17 +87,17 @@ class RawTokenFormatter(Formatter):
if self.compress == 'gz':
import gzip
outfile = gzip.GzipFile('', 'wb', 9, outfile)
-
+
def write(text):
outfile.write(text.encode())
flush = outfile.flush
elif self.compress == 'bz2':
import bz2
compressor = bz2.BZ2Compressor(9)
-
+
def write(text):
outfile.write(compressor.compress(text.encode()))
-
+
def flush():
outfile.write(compressor.flush())
outfile.flush()
@@ -118,15 +118,15 @@ class RawTokenFormatter(Formatter):
write("%s\t%r\n" % (ttype, value))
flush()
-
+
TESTCASE_BEFORE = u'''\
- def testNeedsName(lexer):
+ def testNeedsName(lexer):
fragment = %r
tokens = [
'''
TESTCASE_AFTER = u'''\
]
- assert list(lexer.get_tokens(fragment)) == tokens
+ assert list(lexer.get_tokens(fragment)) == tokens
'''
diff --git a/contrib/python/Pygments/py2/pygments/formatters/rtf.py b/contrib/python/Pygments/py2/pygments/formatters/rtf.py
index 1246db2a73..b980b19281 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/rtf.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/rtf.py
@@ -35,7 +35,7 @@ class RtfFormatter(Formatter):
``'default'``).
`fontface`
- The used font family, for example ``Bitstream Vera Sans``. Defaults to
+ The used font family, for example ``Bitstream Vera Sans``. Defaults to
some generic font which is supposed to have fixed width.
`fontsize`
@@ -70,7 +70,7 @@ class RtfFormatter(Formatter):
.replace(u'}', u'\\}')
def _escape_text(self, text):
- # empty strings, should give a small performance improvement
+ # empty strings, should give a small performance improvement
if not text:
return u''
diff --git a/contrib/python/Pygments/py2/pygments/lexer.py b/contrib/python/Pygments/py2/pygments/lexer.py
index 56a7e1e8e3..5d717d3f88 100644
--- a/contrib/python/Pygments/py2/pygments/lexer.py
+++ b/contrib/python/Pygments/py2/pygments/lexer.py
@@ -469,7 +469,7 @@ class RegexLexerMeta(LexerMeta):
def _process_state(cls, unprocessed, processed, state):
"""Preprocess a single state definition."""
- assert type(state) is str, "wrong state name %r" % state
+ assert type(state) is str, "wrong state name %r" % state
assert state[0] != '#', "invalid state name %r" % state
if state in processed:
return processed[state]
diff --git a/contrib/python/Pygments/py2/pygments/lexers/__init__.py b/contrib/python/Pygments/py2/pygments/lexers/__init__.py
index 100d807660..3443573b6d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/__init__.py
@@ -20,13 +20,13 @@ from pygments.modeline import get_filetype_from_buffer
from pygments.plugin import find_plugin_lexers
from pygments.util import ClassNotFound, itervalues, guess_decode, text_type
-COMPAT = {
- 'Python3Lexer': 'PythonLexer',
- 'Python3TracebackLexer': 'PythonTracebackLexer',
-}
+COMPAT = {
+ 'Python3Lexer': 'PythonLexer',
+ 'Python3TracebackLexer': 'PythonTracebackLexer',
+}
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
- 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
+ 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
_lexer_cache = {}
_pattern_cache = {}
@@ -147,8 +147,8 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options):
# And finally instantiate it with the options
return lexer_class(**options)
except IOError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
- except ClassNotFound:
+ raise ClassNotFound('cannot read %s: %s' % (filename, err))
+ except ClassNotFound:
raise
except Exception as err:
raise ClassNotFound('error when loading custom lexer: %s' % err)
@@ -331,8 +331,8 @@ class _automodule(types.ModuleType):
cls = _lexer_cache[info[1]]
setattr(self, name, cls)
return cls
- if name in COMPAT:
- return getattr(self, COMPAT[name])
+ if name in COMPAT:
+ return getattr(self, COMPAT[name])
raise AttributeError(name)
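
The COMPAT table together with the module-level __getattr__ shown above keeps the old Python 3 lexer names importable. Roughly, under this 2.5.x layout:

    from pygments.lexers import PythonLexer, Python3Lexer, get_lexer_by_name

    # The compat alias resolves to the very same class object ...
    assert Python3Lexer is PythonLexer
    # ... and name-based lookup returns an instance of it as well.
    assert isinstance(get_lexer_by_name('python'), PythonLexer)
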
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py
index b76c22aba8..4cd5ac1733 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py
@@ -14,7 +14,7 @@
:license: BSD, see LICENSE for details.
"""
-ASYFUNCNAME = {
+ASYFUNCNAME = {
'AND',
'Arc',
'ArcArrow',
@@ -1038,9 +1038,9 @@ ASYFUNCNAME = {
'ztick',
'ztick3',
'ztrans'
-}
+}
-ASYVARNAME = {
+ASYVARNAME = {
'AliceBlue',
'Align',
'Allow',
@@ -1642,4 +1642,4 @@ ASYVARNAME = {
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
-}
+}
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py
index 7722e81f53..c28985ed43 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py
@@ -9,7 +9,7 @@
:license: BSD, see LICENSE for details.
"""
-BUILTIN_FUNCTIONS = { # 638 functions
+BUILTIN_FUNCTIONS = { # 638 functions
'<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
'adjustable-array-p', 'adjust-array', 'allocate-instance',
@@ -157,17 +157,17 @@ BUILTIN_FUNCTIONS = { # 638 functions
'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
'y-or-n-p', 'zerop',
-}
+}
-SPECIAL_FORMS = {
+SPECIAL_FORMS = {
'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
'unwind-protect',
-}
+}
-MACROS = {
+MACROS = {
'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
'define-compiler-macro', 'define-condition', 'define-method-combination',
@@ -188,19 +188,19 @@ MACROS = {
'with-input-from-string', 'with-open-file', 'with-open-stream',
'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
'with-slots', 'with-standard-io-syntax',
-}
+}
-LAMBDA_LIST_KEYWORDS = {
+LAMBDA_LIST_KEYWORDS = {
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
-}
+}
-DECLARATIONS = {
+DECLARATIONS = {
'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
'ignorable', 'notinline', 'type',
-}
+}
-BUILTIN_TYPES = {
+BUILTIN_TYPES = {
'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
@@ -217,9 +217,9 @@ BUILTIN_TYPES = {
'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
'undefined-function', 'warning',
-}
+}
-BUILTIN_CLASSES = {
+BUILTIN_CLASSES = {
'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
'file-stream', 'float', 'function', 'generic-function', 'hash-table',
@@ -229,4 +229,4 @@ BUILTIN_CLASSES = {
'standard-generic-function', 'standard-method', 'standard-object',
'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-}
+}
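The Common Lisp sets are consumed the same way: a matched symbol is checked against each group to pick a token type. A hedged classification sketch (the token choices here are illustrative, not the lexer's exact mapping; 'abs', 'let' and 'defclass' all appear in the sets above):

    from pygments.lexers._cl_builtins import (
        BUILTIN_FUNCTIONS, SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS,
        DECLARATIONS, BUILTIN_TYPES, BUILTIN_CLASSES)
    from pygments.token import Keyword, Name

    def classify(symbol):
        """Pick a coarse token type for a bare Common Lisp symbol."""
        if symbol in BUILTIN_FUNCTIONS:
            return Name.Builtin
        if symbol in SPECIAL_FORMS or symbol in MACROS:
            return Keyword
        if symbol in LAMBDA_LIST_KEYWORDS or symbol in DECLARATIONS:
            return Keyword.Declaration
        if symbol in BUILTIN_TYPES or symbol in BUILTIN_CLASSES:
            return Name.Builtin.Pseudo
        return Name.Variable

    print(classify('abs'), classify('let'), classify('defclass'))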
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py
index 2cf4443851..39cd6f75be 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py
@@ -14,9 +14,9 @@
from __future__ import print_function
-COCOA_INTERFACES = {'UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 
'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 
'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 
'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 
'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 
'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 
'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'}
-COCOA_PROTOCOLS = {'SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 
'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'}
-COCOA_PRIMITIVES = {'ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 
'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'}
+COCOA_INTERFACES = {'UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 
'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 
'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 
'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 
'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 
'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 
'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'}
+COCOA_PROTOCOLS = {'SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 
'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'}
+COCOA_PRIMITIVES = {'ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 
'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'}
if __name__ == '__main__': # pragma: no cover
import os
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py
index 72e1fe39e9..3e891be4ff 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py
@@ -7,51 +7,51 @@
:license: BSD, see LICENSE for details.
"""
-# Opcodes in Csound 6.13.0 using:
-# python3 -c "
-# import re
-# from subprocess import Popen, PIPE
-# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
-# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
-# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
-# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
+# Opcodes in Csound 6.13.0 using:
+# python3 -c "
+# import re
+# from subprocess import Popen, PIPE
+# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
+# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
+# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
+# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
# deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes]
-# # Remove opcodes that csound.py treats as keywords.
-# keyword_opcodes = [
-# 'cggoto', # https://csound.com/docs/manual/cggoto.html
-# 'cigoto', # https://csound.com/docs/manual/cigoto.html
-# 'cingoto', # (undocumented)
-# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html
-# 'cngoto', # https://csound.com/docs/manual/cngoto.html
-# 'cnkgoto', # (undocumented)
-# 'endin', # https://csound.com/docs/manual/endin.html
-# 'endop', # https://csound.com/docs/manual/endop.html
-# 'goto', # https://csound.com/docs/manual/goto.html
-# 'igoto', # https://csound.com/docs/manual/igoto.html
-# 'instr', # https://csound.com/docs/manual/instr.html
-# 'kgoto', # https://csound.com/docs/manual/kgoto.html
-# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html
-# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html
-# 'loop_le', # https://csound.com/docs/manual/loop_le.html
-# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html
-# 'opcode', # https://csound.com/docs/manual/opcode.html
-# 'reinit', # https://csound.com/docs/manual/reinit.html
-# 'return', # https://csound.com/docs/manual/return.html
-# 'rireturn', # https://csound.com/docs/manual/rireturn.html
-# 'rigoto', # https://csound.com/docs/manual/rigoto.html
-# 'tigoto', # https://csound.com/docs/manual/tigoto.html
-# 'timout' # https://csound.com/docs/manual/timout.html
-# ]
-# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
-# newline = '\n'
-# print(f'''OPCODES = set(\'''
-# {newline.join(opcodes)}
+# # Remove opcodes that csound.py treats as keywords.
+# keyword_opcodes = [
+# 'cggoto', # https://csound.com/docs/manual/cggoto.html
+# 'cigoto', # https://csound.com/docs/manual/cigoto.html
+# 'cingoto', # (undocumented)
+# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html
+# 'cngoto', # https://csound.com/docs/manual/cngoto.html
+# 'cnkgoto', # (undocumented)
+# 'endin', # https://csound.com/docs/manual/endin.html
+# 'endop', # https://csound.com/docs/manual/endop.html
+# 'goto', # https://csound.com/docs/manual/goto.html
+# 'igoto', # https://csound.com/docs/manual/igoto.html
+# 'instr', # https://csound.com/docs/manual/instr.html
+# 'kgoto', # https://csound.com/docs/manual/kgoto.html
+# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html
+# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html
+# 'loop_le', # https://csound.com/docs/manual/loop_le.html
+# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html
+# 'opcode', # https://csound.com/docs/manual/opcode.html
+# 'reinit', # https://csound.com/docs/manual/reinit.html
+# 'return', # https://csound.com/docs/manual/return.html
+# 'rireturn', # https://csound.com/docs/manual/rireturn.html
+# 'rigoto', # https://csound.com/docs/manual/rigoto.html
+# 'tigoto', # https://csound.com/docs/manual/tigoto.html
+# 'timout' # https://csound.com/docs/manual/timout.html
+# ]
+# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
+# newline = '\n'
+# print(f'''OPCODES = set(\'''
+# {newline.join(opcodes)}
# \'''.split())
#
# DEPRECATED_OPCODES = set(\'''
-# {newline.join(deprecated_opcodes)}
+# {newline.join(deprecated_opcodes)}
# \'''.split())
-# ''')
+# ''')
# "
OPCODES = set('''
@@ -173,7 +173,7 @@ STKBowed
STKBrass
STKClarinet
STKDrummer
-STKFMVoices
+STKFMVoices
STKFlute
STKHevyMetl
STKMandolin
@@ -205,7 +205,7 @@ alwayson
ampdb
ampdbfs
ampmidi
-ampmidicurve
+ampmidicurve
ampmidid
areson
aresonk
@@ -422,17 +422,17 @@ flashtxt
flooper
flooper2
floor
-fluidAllOut
-fluidCCi
-fluidCCk
-fluidControl
-fluidEngine
-fluidInfo
-fluidLoad
-fluidNote
-fluidOut
-fluidProgramSelect
-fluidSetInterpMethod
+fluidAllOut
+fluidCCi
+fluidCCk
+fluidControl
+fluidEngine
+fluidInfo
+fluidLoad
+fluidNote
+fluidOut
+fluidProgramSelect
+fluidSetInterpMethod
fmanal
fmax
fmb3
@@ -507,7 +507,7 @@ grain
grain2
grain3
granule
-gtf
+gtf
guiro
harmon
harmon2
@@ -615,10 +615,10 @@ la_i_multiply_mc
la_i_multiply_mr
la_i_multiply_vc
la_i_multiply_vr
-la_i_norm1_mc
-la_i_norm1_mr
-la_i_norm1_vc
-la_i_norm1_vr
+la_i_norm1_mc
+la_i_norm1_mr
+la_i_norm1_vc
+la_i_norm1_vr
la_i_norm_euclid_mc
la_i_norm_euclid_mr
la_i_norm_euclid_vc
@@ -713,10 +713,10 @@ la_k_multiply_mc
la_k_multiply_mr
la_k_multiply_vc
la_k_multiply_vr
-la_k_norm1_mc
-la_k_norm1_mr
-la_k_norm1_vc
-la_k_norm1_vr
+la_k_norm1_mc
+la_k_norm1_mr
+la_k_norm1_vc
+la_k_norm1_vr
la_k_norm_euclid_mc
la_k_norm_euclid_mr
la_k_norm_euclid_vc
@@ -916,8 +916,8 @@ nrpn
nsamp
nstance
nstrnum
-nstrstr
-ntof
+nstrstr
+ntof
ntom
ntrpol
nxtpow2
@@ -1354,7 +1354,7 @@ strfromurl
strget
strindex
strindexk
-string2array
+string2array
strlen
strlenk
strlower
@@ -1606,7 +1606,7 @@ DEPRECATED_OPCODES = set('''
array
bformdec
bformenc
-changed
+changed
copy2ftab
copy2ttab
hrtfer
@@ -1616,7 +1616,7 @@ maxtab
mintab
pop
pop_f
-ptableiw
+ptableiw
push
push_f
scalet
@@ -1635,7 +1635,7 @@ spectrum
stack
sumtab
tabgen
-tableiw
+tableiw
tabmap
tabmap_i
tabslice
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py
index ca3acb1c10..9b0f123d74 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py
@@ -288,7 +288,7 @@ if __name__ == '__main__': # pragma: no cover
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
- modules = {k: tuple(v) for k, v in modules.iteritems()}
+ modules = {k: tuple(v) for k, v in modules.iteritems()}
regenerate(__file__, modules)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_mapping.py b/contrib/python/Pygments/py2/pygments/lexers/_mapping.py
index acb71ad94b..71c692a476 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_mapping.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_mapping.py
@@ -145,7 +145,7 @@ LEXERS = {
'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
- 'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
+ 'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
@@ -255,7 +255,7 @@ LEXERS = {
'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
+ 'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
@@ -305,7 +305,7 @@ LEXERS = {
'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
- 'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
+ 'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
@@ -343,11 +343,11 @@ LEXERS = {
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
- 'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
- 'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
+ 'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
+ 'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
- 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
+ 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
+ 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
@@ -369,12 +369,12 @@ LEXERS = {
'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
- 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()),
+ 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()),
'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
- 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
+ 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
@@ -389,11 +389,11 @@ LEXERS = {
'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
- 'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
+ 'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
- 'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
+ 'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()),
@@ -405,7 +405,7 @@ LEXERS = {
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
- 'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
+ 'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
@@ -476,9 +476,9 @@ LEXERS = {
'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
- 'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
+ 'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
- 'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
+ 'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
}
if __name__ == '__main__': # pragma: no cover
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py
index e59fd910d1..0b6ae38ccf 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py
@@ -884,7 +884,7 @@ constants = (
'PERIOD_W1',
'POINTER_AUTOMATIC',
'POINTER_DYNAMIC',
- 'POINTER_INVALID',
+ 'POINTER_INVALID',
'PRICE_CLOSE',
'PRICE_HIGH',
'PRICE_LOW',
diff --git a/contrib/python/Pygments/py2/pygments/lexers/asm.py b/contrib/python/Pygments/py2/pygments/lexers/asm.py
index 32ac936127..a55dd27bb1 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/asm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/asm.py
@@ -37,7 +37,7 @@ class GasLexer(RegexLexer):
char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
- register = '%' + identifier
+ register = '%' + identifier
tokens = {
'root': [
@@ -53,7 +53,7 @@ class GasLexer(RegexLexer):
(string, String),
('@' + identifier, Name.Attribute),
(number, Number.Integer),
- (register, Name.Variable),
+ (register, Name.Variable),
(r'[\r\n]+', Text, '#pop'),
(r'[;#].*?\n', Comment, '#pop'),
@@ -74,7 +74,7 @@ class GasLexer(RegexLexer):
(identifier, Name.Constant),
(number, Number.Integer),
# Registers
- (register, Name.Variable),
+ (register, Name.Variable),
# Numeric constants
('$'+number, Number.Integer),
(r"$'(.|\\')'", String.Char),
@@ -457,10 +457,10 @@ class NasmLexer(RegexLexer):
filenames = ['*.asm', '*.ASM']
mimetypes = ['text/x-nasm']
- # Tasm uses the same file endings, but TASM is not as common as NASM, so
- # we prioritize NASM higher by default
- priority = 1.0
-
+ # Tasm uses the same file endings, but TASM is not as common as NASM, so
+ # we prioritize NASM higher by default
+ priority = 1.0
+
identifier = r'[a-z$._?][\w$.?#@~]*'
hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
octn = r'[0-7]+q'
@@ -526,12 +526,12 @@ class NasmLexer(RegexLexer):
],
}
- def analyse_text(text):
- # Probably TASM
- if re.match(r'PROC', text, re.IGNORECASE):
- return False
-
+ def analyse_text(text):
+ # Probably TASM
+ if re.match(r'PROC', text, re.IGNORECASE):
+ return False
+
class NasmObjdumpLexer(ObjdumpLexer):
"""
For the output of 'objdump -d -M intel'.
@@ -625,12 +625,12 @@ class TasmLexer(RegexLexer):
],
}
- def analyse_text(text):
- # See above
- if re.match(r'PROC', text, re.I):
- return True
-
+ def analyse_text(text):
+ # See above
+ if re.match(r'PROC', text, re.I):
+ return True
+
class Ca65Lexer(RegexLexer):
"""
For ca65 assembler sources.
diff --git a/contrib/python/Pygments/py2/pygments/lexers/bibtex.py b/contrib/python/Pygments/py2/pygments/lexers/bibtex.py
index 6d15c230d2..2c4c84b54e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/bibtex.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/bibtex.py
@@ -33,7 +33,7 @@ class BibTeXLexer(ExtendedRegexLexer):
flags = re.IGNORECASE
ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
- IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
+ IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
def open_brace_callback(self, match, ctx):
opening_brace = match.group()
diff --git a/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py b/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py
index 5d84a37758..fd7acfed4b 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py
@@ -144,21 +144,21 @@ class CFamilyLexer(RegexLexer):
]
}
- stdlib_types = {
+ stdlib_types = {
'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
- 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
- c99_types = {
+ 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
+ c99_types = {
'_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
- 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
- linux_types = {
+ 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
+ linux_types = {
'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
- 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
+ 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
def __init__(self, **options):
self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/c_like.py b/contrib/python/Pygments/py2/pygments/lexers/c_like.py
index 82dee35ad3..2d39f2812c 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/c_like.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/c_like.py
@@ -291,23 +291,23 @@ class CudaLexer(CLexer):
aliases = ['cuda', 'cu']
mimetypes = ['text/x-cuda']
- function_qualifiers = {'__device__', '__global__', '__host__',
- '__noinline__', '__forceinline__'}
- variable_qualifiers = {'__device__', '__constant__', '__shared__',
- '__restrict__'}
- vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
- 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
- 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
- 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
- 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
- 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
- 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
- 'double1', 'double2', 'dim3'}
- variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'}
- functions = {'__threadfence_block', '__threadfence', '__threadfence_system',
- '__syncthreads', '__syncthreads_count', '__syncthreads_and',
- '__syncthreads_or'}
- execution_confs = {'<<<', '>>>'}
+ function_qualifiers = {'__device__', '__global__', '__host__',
+ '__noinline__', '__forceinline__'}
+ variable_qualifiers = {'__device__', '__constant__', '__shared__',
+ '__restrict__'}
+ vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
+ 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
+ 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
+ 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
+ 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
+ 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
+ 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
+ 'double1', 'double2', 'dim3'}
+ variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'}
+ functions = {'__threadfence_block', '__threadfence', '__threadfence_system',
+ '__syncthreads', '__syncthreads_count', '__syncthreads_and',
+ '__syncthreads_or'}
+ execution_confs = {'<<<', '>>>'}
def get_tokens_unprocessed(self, text):
for index, token, value in CLexer.get_tokens_unprocessed(self, text):
@@ -352,7 +352,7 @@ class SwigLexer(CppLexer):
}
# This is a far from complete set of SWIG directives
- swig_directives = {
+ swig_directives = {
# Most common directives
'%apply', '%define', '%director', '%enddef', '%exception', '%extend',
'%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
@@ -371,7 +371,7 @@ class SwigLexer(CppLexer):
'%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
'%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
'%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
- '%warnfilter'}
+ '%warnfilter'}
def analyse_text(text):
rv = 0
@@ -429,13 +429,13 @@ class ArduinoLexer(CppLexer):
mimetypes = ['text/x-arduino']
# Language sketch main structure functions
- structure = {'setup', 'loop'}
+ structure = {'setup', 'loop'}
# Language operators
- operators = {'not', 'or', 'and', 'xor'}
+ operators = {'not', 'or', 'and', 'xor'}
# Language 'variables'
- variables = {
+ variables = {
'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET',
'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH',
@@ -452,10 +452,10 @@ class ArduinoLexer(CppLexer):
'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary',
'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
- 'atomic_llong', 'atomic_ullong', 'PROGMEM'}
+ 'atomic_llong', 'atomic_ullong', 'PROGMEM'}
# Language shipped functions and class ( )
- functions = {
+ functions = {
'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient',
@@ -517,13 +517,13 @@ class ArduinoLexer(CppLexer):
'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put',
'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
- 'isHexadecimalDigit'}
+ 'isHexadecimalDigit'}
# do not highlight
- suppress_highlight = {
+ suppress_highlight = {
'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
- 'static_assert', 'thread_local', 'restrict'}
+ 'static_assert', 'thread_local', 'restrict'}
def get_tokens_unprocessed(self, text):
for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
diff --git a/contrib/python/Pygments/py2/pygments/lexers/configs.py b/contrib/python/Pygments/py2/pygments/lexers/configs.py
index 0911b6e24c..d9b0f188b2 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/configs.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/configs.py
@@ -300,7 +300,7 @@ class ApacheConfLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
+ (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
(r'(<[^\s>]+)(?:(\s+)(.*))?(>)',
bygroups(Name.Tag, Text, String, Name.Tag)),
(r'([a-z]\w*)(\s+)',
@@ -319,7 +319,7 @@ class ApacheConfLexer(RegexLexer):
r'os|productonly|full|emerg|alert|crit|error|warn|'
r'notice|info|debug|registry|script|inetd|standalone|'
r'user|group)\b', Keyword),
- (r'"([^"\\]*(?:\\(.|[\n])[^"\\]*)*)"', String.Double),
+ (r'"([^"\\]*(?:\\(.|[\n])[^"\\]*)*)"', String.Double),
(r'[^\s"\\]+', Text)
],
}
@@ -540,16 +540,16 @@ class DockerLexer(RegexLexer):
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
- _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
+ _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
_bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
- _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
+ _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'#.*', Comment),
- (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
- bygroups(Keyword, Text, String, Text, Keyword, Text, String)),
+ (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
+ bygroups(Keyword, Text, String, Text, Keyword, Text, String)),
(r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
(r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb),
bygroups(Keyword, using(BashLexer))),
@@ -576,35 +576,35 @@ class TerraformLexer(RegexLexer):
filenames = ['*.tf']
mimetypes = ['application/x-tf', 'application/x-terraform']
- embedded_keywords = ('ingress', 'egress', 'listener', 'default',
- 'connection', 'alias', 'terraform', 'tags', 'vars',
- 'config', 'lifecycle', 'timeouts')
+ embedded_keywords = ('ingress', 'egress', 'listener', 'default',
+ 'connection', 'alias', 'terraform', 'tags', 'vars',
+ 'config', 'lifecycle', 'timeouts')
tokens = {
'root': [
- include('string'),
- include('punctuation'),
- include('curly'),
- include('basic'),
- include('whitespace'),
- (r'[0-9]+', Number),
+ include('string'),
+ include('punctuation'),
+ include('curly'),
+ include('basic'),
+ include('whitespace'),
+ (r'[0-9]+', Number),
],
'basic': [
- (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (r'\s*/\*', Comment.Multiline, 'comment'),
- (r'\s*#.*\n', Comment.Single),
- (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
- (words(('variable', 'resource', 'provider', 'provisioner', 'module',
- 'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved, 'function'),
- (words(embedded_keywords, prefix=r'\b', suffix=r'\b'),
- Keyword.Declaration),
- (r'\$\{', String.Interpol, 'var_builtin'),
+ (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (r'\s*/\*', Comment.Multiline, 'comment'),
+ (r'\s*#.*\n', Comment.Single),
+ (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
+ (words(('variable', 'resource', 'provider', 'provisioner', 'module',
+ 'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved, 'function'),
+ (words(embedded_keywords, prefix=r'\b', suffix=r'\b'),
+ Keyword.Declaration),
+ (r'\$\{', String.Interpol, 'var_builtin'),
],
'function': [
- (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
- include('punctuation'),
- include('curly'),
+ (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
+ include('punctuation'),
+ include('curly'),
],
'var_builtin': [
(r'\$\{', String.Interpol, '#push'),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/csound.py b/contrib/python/Pygments/py2/pygments/lexers/csound.py
index c35bd94b0e..e5ee14c652 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/csound.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/csound.py
@@ -35,7 +35,7 @@ class CsoundLexer(RegexLexer):
'preprocessor directives': [
(r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
- (r'#includestr', Comment.Preproc, 'includestr directive'),
+ (r'#includestr', Comment.Preproc, 'includestr directive'),
(r'#include', Comment.Preproc, 'include directive'),
(r'#[ \t]*define', Comment.Preproc, 'define directive'),
(r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
@@ -45,10 +45,10 @@ class CsoundLexer(RegexLexer):
include('whitespace'),
(r'([^ \t]).*?\1', String, '#pop')
],
- 'includestr directive': [
- include('whitespace'),
- (r'"', String, ('#pop', 'quoted string'))
- ],
+ 'includestr directive': [
+ include('whitespace'),
+ (r'"', String, ('#pop', 'quoted string'))
+ ],
'define directive': [
(r'\n', Text),
@@ -119,13 +119,13 @@ class CsoundLexer(RegexLexer):
(r'\d+', Number.Integer)
],
- 'quoted string': [
- (r'"', String, '#pop'),
- (r'[^"$]+', String),
- include('macro uses'),
- (r'[$]', String)
- ],
-
+ 'quoted string': [
+ (r'"', String, '#pop'),
+ (r'[^"$]+', String),
+ include('macro uses'),
+ (r'[$]', String)
+ ],
+
'braced string': [
# Do nothing. This must be defined in subclasses.
]
@@ -134,7 +134,7 @@ class CsoundLexer(RegexLexer):
class CsoundScoreLexer(CsoundLexer):
"""
- For `Csound <https://csound.com>`_ scores.
+ For `Csound <https://csound.com>`_ scores.
.. versionadded:: 2.1
"""
@@ -156,7 +156,7 @@ class CsoundScoreLexer(CsoundLexer):
(r'z', Keyword.Constant),
# z is a constant equal to 800,000,000,000. 800 billion seconds is about
# 25,367.8 years. See also
- # https://csound.com/docs/manual/ScoreTop.html and
+ # https://csound.com/docs/manual/ScoreTop.html and
# https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c.
(r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)),
@@ -200,7 +200,7 @@ class CsoundScoreLexer(CsoundLexer):
class CsoundOrchestraLexer(CsoundLexer):
"""
- For `Csound <https://csound.com>`_ orchestras.
+ For `Csound <https://csound.com>`_ orchestras.
.. versionadded:: 2.1
"""
@@ -217,25 +217,25 @@ class CsoundOrchestraLexer(CsoundLexer):
yield match.start(), Name.Function, opcode
def name_callback(lexer, match):
- type_annotation_token = Keyword.Type
-
+ type_annotation_token = Keyword.Type
+
name = match.group(1)
if name in OPCODES or name in DEPRECATED_OPCODES:
yield match.start(), Name.Builtin, name
elif name in lexer.user_defined_opcodes:
yield match.start(), Name.Function, name
else:
- type_annotation_token = Name
- name_match = re.search(r'^(g?[afikSw])(\w+)', name)
- if name_match:
- yield name_match.start(1), Keyword.Type, name_match.group(1)
- yield name_match.start(2), Name, name_match.group(2)
+ type_annotation_token = Name
+ name_match = re.search(r'^(g?[afikSw])(\w+)', name)
+ if name_match:
+ yield name_match.start(1), Keyword.Type, name_match.group(1)
+ yield name_match.start(2), Name, name_match.group(2)
else:
yield match.start(), Name, name
- if match.group(2):
- yield match.start(2), Punctuation, match.group(2)
- yield match.start(3), type_annotation_token, match.group(3)
+ if match.group(2):
+ yield match.start(2), Punctuation, match.group(2)
+ yield match.start(3), type_annotation_token, match.group(3)
tokens = {
'root': [
@@ -330,13 +330,13 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
],
# Format specifiers are highlighted in all strings, even though only
- # fprintks https://csound.com/docs/manual/fprintks.html
- # fprints https://csound.com/docs/manual/fprints.html
- # printf/printf_i https://csound.com/docs/manual/printf.html
- # printks https://csound.com/docs/manual/printks.html
- # prints https://csound.com/docs/manual/prints.html
- # sprintf https://csound.com/docs/manual/sprintf.html
- # sprintfk https://csound.com/docs/manual/sprintfk.html
+ # fprintks https://csound.com/docs/manual/fprintks.html
+ # fprints https://csound.com/docs/manual/fprints.html
+ # printf/printf_i https://csound.com/docs/manual/printf.html
+ # printks https://csound.com/docs/manual/printks.html
+ # prints https://csound.com/docs/manual/prints.html
+ # sprintf https://csound.com/docs/manual/sprintf.html
+ # sprintfk https://csound.com/docs/manual/sprintfk.html
# work with strings that contain format specifiers. In addition, these
# opcodes’ handling of format specifiers is inconsistent:
# - fprintks, fprints, printks, and prints do accept %a and %A
@@ -373,7 +373,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'Csound score opcode': [
include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
+ (r'"', String, 'quoted string'),
(r'\{\{', String, 'Csound score'),
(r'\n', Text, '#pop')
],
@@ -384,7 +384,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'Python opcode': [
include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
+ (r'"', String, 'quoted string'),
(r'\{\{', String, 'Python'),
(r'\n', Text, '#pop')
],
@@ -395,7 +395,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'Lua opcode': [
include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
+ (r'"', String, 'quoted string'),
(r'\{\{', String, 'Lua'),
(r'\n', Text, '#pop')
],
@@ -408,7 +408,7 @@ class CsoundOrchestraLexer(CsoundLexer):
class CsoundDocumentLexer(RegexLexer):
"""
- For `Csound <https://csound.com>`_ documents.
+ For `Csound <https://csound.com>`_ documents.
.. versionadded:: 2.1
"""
diff --git a/contrib/python/Pygments/py2/pygments/lexers/data.py b/contrib/python/Pygments/py2/pygments/lexers/data.py
index 46ca734006..ccfe8a31ef 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/data.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/data.py
@@ -233,7 +233,7 @@ class YamlLexer(ExtendedRegexLexer):
# whitespaces separating tokens
(r'[ ]+', Text),
# key with colon
- (r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
+ (r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
# tags, anchors and aliases,
include('descriptors'),
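
The YAML key rule above relies on `bygroups()`, which assigns one token type per regex group. A self-contained illustration of that helper, assuming only an installed Pygments (`KVLexer` is an invented toy lexer, not the YamlLexer)::

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Name, Punctuation, Text

    class KVLexer(RegexLexer):
        tokens = {
            'root': [
                # one token type per group: key, colon, trailing space
                (r'([^:\n]+)(:)([ \t]*)', bygroups(Name.Tag, Punctuation, Text)),
                (r'[^\n]+', Text),
                (r'\n', Text),
            ],
        }

    print(list(KVLexer().get_tokens('key: value\n')))
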
diff --git a/contrib/python/Pygments/py2/pygments/lexers/dsls.py b/contrib/python/Pygments/py2/pygments/lexers/dsls.py
index 0af3c6c273..d940ca4461 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/dsls.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/dsls.py
@@ -16,7 +16,7 @@ from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal, Whitespace
-__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
+__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
@@ -40,9 +40,9 @@ class ProtoBufLexer(RegexLexer):
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
(words((
- 'import', 'option', 'optional', 'required', 'repeated',
- 'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
- 'max', 'rpc', 'returns', 'oneof'), prefix=r'\b', suffix=r'\b'),
+ 'import', 'option', 'optional', 'required', 'repeated',
+ 'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
+ 'max', 'rpc', 'returns', 'oneof'), prefix=r'\b', suffix=r'\b'),
Keyword),
(words((
'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
@@ -188,166 +188,166 @@ class ThriftLexer(RegexLexer):
}
-class ZeekLexer(RegexLexer):
+class ZeekLexer(RegexLexer):
"""
- For `Zeek <https://www.zeek.org/>`_ scripts.
+ For `Zeek <https://www.zeek.org/>`_ scripts.
- .. versionadded:: 2.5
+ .. versionadded:: 2.5
"""
- name = 'Zeek'
- aliases = ['zeek', 'bro']
- filenames = ['*.zeek', '*.bro']
+ name = 'Zeek'
+ aliases = ['zeek', 'bro']
+ filenames = ['*.zeek', '*.bro']
- _hex = r'[0-9a-fA-F]'
+ _hex = r'[0-9a-fA-F]'
_float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
_h = r'[A-Za-z0-9][-A-Za-z0-9]*'
tokens = {
'root': [
- include('whitespace'),
- include('comments'),
- include('directives'),
- include('attributes'),
- include('types'),
- include('keywords'),
- include('literals'),
- include('operators'),
- include('punctuation'),
- (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
- Name.Function),
- include('identifiers'),
- ],
-
- 'whitespace': [
+ include('whitespace'),
+ include('comments'),
+ include('directives'),
+ include('attributes'),
+ include('types'),
+ include('keywords'),
+ include('literals'),
+ include('operators'),
+ include('punctuation'),
+ (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
+ Name.Function),
+ include('identifiers'),
+ ],
+
+ 'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text),
- ],
-
- 'comments': [
- (r'#.*$', Comment),
- ],
-
- 'directives': [
- (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
- (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
- (r'(@prefixes)\s*(\+?=).*$', Comment.Preproc),
- ],
-
- 'attributes': [
- (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
- 'delete_func', 'expire_func', 'read_expire', 'write_expire',
- 'create_expire', 'synchronized', 'persistent', 'rotate_interval',
- 'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
- 'type_column', 'deprecated'),
- prefix=r'&', suffix=r'\b'),
- Keyword.Pseudo),
- ],
-
- 'types': [
- (words(('any',
- 'enum', 'record', 'set', 'table', 'vector',
- 'function', 'hook', 'event',
- 'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
- 'pattern', 'port', 'string', 'subnet', 'time'),
- suffix=r'\b'),
- Keyword.Type),
-
- (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
- bygroups(Keyword.Type, Text, Operator.Word, Text, Keyword.Type)),
-
- (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
- bygroups(Keyword, Text, Name.Class, Text, Operator, Text, Keyword.Type)),
-
- (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
- bygroups(Keyword, Text, Name, Text, Operator)),
-
- (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
- bygroups(Keyword, Text, Keyword.Type, Text, Name.Class)),
- ],
-
- 'keywords': [
- (words(('redef', 'export', 'if', 'else', 'for', 'while',
- 'return', 'break', 'next', 'continue', 'fallthrough',
- 'switch', 'default', 'case',
- 'add', 'delete',
- 'when', 'timeout', 'schedule'),
- suffix=r'\b'),
- Keyword),
- (r'(print)\b', Keyword),
- (r'(global|local|const|option)\b', Keyword.Declaration),
- (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- ],
-
- 'literals': [
- (r'"', String, 'string'),
-
- # Not the greatest match for patterns, but generally helps
- # disambiguate between start of a pattern and just a division
- # operator.
- (r'/(?=.*/)', String.Regex, 'regex'),
-
+ ],
+
+ 'comments': [
+ (r'#.*$', Comment),
+ ],
+
+ 'directives': [
+ (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
+ (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
+ (r'(@prefixes)\s*(\+?=).*$', Comment.Preproc),
+ ],
+
+ 'attributes': [
+ (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
+ 'delete_func', 'expire_func', 'read_expire', 'write_expire',
+ 'create_expire', 'synchronized', 'persistent', 'rotate_interval',
+ 'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
+ 'type_column', 'deprecated'),
+ prefix=r'&', suffix=r'\b'),
+ Keyword.Pseudo),
+ ],
+
+ 'types': [
+ (words(('any',
+ 'enum', 'record', 'set', 'table', 'vector',
+ 'function', 'hook', 'event',
+ 'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
+ 'pattern', 'port', 'string', 'subnet', 'time'),
+ suffix=r'\b'),
+ Keyword.Type),
+
+ (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
+ bygroups(Keyword.Type, Text, Operator.Word, Text, Keyword.Type)),
+
+ (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
+ bygroups(Keyword, Text, Name.Class, Text, Operator, Text, Keyword.Type)),
+
+ (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
+ bygroups(Keyword, Text, Name, Text, Operator)),
+
+ (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
+ bygroups(Keyword, Text, Keyword.Type, Text, Name.Class)),
+ ],
+
+ 'keywords': [
+ (words(('redef', 'export', 'if', 'else', 'for', 'while',
+ 'return', 'break', 'next', 'continue', 'fallthrough',
+ 'switch', 'default', 'case',
+ 'add', 'delete',
+ 'when', 'timeout', 'schedule'),
+ suffix=r'\b'),
+ Keyword),
+ (r'(print)\b', Keyword),
+ (r'(global|local|const|option)\b', Keyword.Declaration),
+ (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ ],
+
+ 'literals': [
+ (r'"', String, 'string'),
+
+ # Not the greatest match for patterns, but generally helps
+ # disambiguate between start of a pattern and just a division
+ # operator.
+ (r'/(?=.*/)', String.Regex, 'regex'),
+
(r'(T|F)\b', Keyword.Constant),
-
- # Port
- (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
-
- # IPv4 Address
- (r'(\d{1,3}.){3}(\d{1,3})\b', Number),
-
- # IPv6 Address
- (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number),
-
- # Numeric
- (r'0[xX]' + _hex + r'+\b', Number.Hex),
- (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
- (_float + r'\b', Number.Float),
- (r'(\d+)\b', Number.Integer),
-
+
+ # Port
+ (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
+
+ # IPv4 Address
+ (r'(\d{1,3}.){3}(\d{1,3})\b', Number),
+
+ # IPv6 Address
+ (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number),
+
+ # Numeric
+ (r'0[xX]' + _hex + r'+\b', Number.Hex),
+ (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
+ (_float + r'\b', Number.Float),
+ (r'(\d+)\b', Number.Integer),
+
# Hostnames
(_h + r'(\.' + _h + r')+', String),
- ],
-
- 'operators': [
- (r'[!%*/+<=>~|&^-]', Operator),
+ ],
+
+ 'operators': [
+ (r'[!%*/+<=>~|&^-]', Operator),
(r'([-+=&|]{2}|[+=!><-]=)', Operator),
- (r'(in|as|is|of)\b', Operator.Word),
- (r'\??\$', Operator),
- ],
-
- 'punctuation': [
- (r'[{}()\[\],;.]', Punctuation),
- # The "ternary if", which uses '?' and ':', could instead be
- # treated as an Operator, but colons are more frequently used to
- # separate field/identifier names from their types, so the (often)
- # less-prominent Punctuation is used even with '?' for consistency.
- (r'[?:]', Punctuation),
- ],
-
- 'identifiers': [
- (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
+ (r'(in|as|is|of)\b', Operator.Word),
+ (r'\??\$', Operator),
+ ],
+
+ 'punctuation': [
+ (r'[{}()\[\],;.]', Punctuation),
+ # The "ternary if", which uses '?' and ':', could instead be
+ # treated as an Operator, but colons are more frequently used to
+ # separate field/identifier names from their types, so the (often)
+ # less-prominent Punctuation is used even with '?' for consistency.
+ (r'[?:]', Punctuation),
+ ],
+
+ 'identifiers': [
+ (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
(r'[a-zA-Z_]\w*', Name)
],
-
+
'string': [
- (r'\\.', String.Escape),
- (r'%-?[0-9]*(\.[0-9]+)?[DTdxsefg]', String.Escape),
+ (r'\\.', String.Escape),
+ (r'%-?[0-9]*(\.[0-9]+)?[DTdxsefg]', String.Escape),
(r'"', String, '#pop'),
- (r'.', String),
+ (r'.', String),
],
-
+
'regex': [
- (r'\\.', String.Escape),
+ (r'\\.', String.Escape),
(r'/', String.Regex, '#pop'),
- (r'.', String.Regex),
- ],
+ (r'.', String.Regex),
+ ],
}
-BroLexer = ZeekLexer
-
-
+BroLexer = ZeekLexer
+
+
class PuppetLexer(RegexLexer):
"""
For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
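
The hunk above covers the whole `ZeekLexer`, which keeps `BroLexer` as a backwards-compatible alias. A small usage sketch, assuming Pygments 2.5 or later; the Zeek snippet is invented sample input::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.dsls import BroLexer, ZeekLexer

    code = 'event zeek_init()\n    {\n    print "hello, world";\n    }\n'
    print(highlight(code, ZeekLexer(), TerminalFormatter()))
    print(BroLexer is ZeekLexer)  # True: the old name is just an alias
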
diff --git a/contrib/python/Pygments/py2/pygments/lexers/dylan.py b/contrib/python/Pygments/py2/pygments/lexers/dylan.py
index dd972bf4d6..e0a9f76d5c 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/dylan.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/dylan.py
@@ -32,27 +32,27 @@ class DylanLexer(RegexLexer):
flags = re.IGNORECASE
- builtins = {
+ builtins = {
'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
'each-subclass', 'exception', 'exclude', 'function', 'generic',
'handler', 'inherited', 'inline', 'inline-only', 'instance',
'interface', 'import', 'keyword', 'library', 'macro', 'method',
'module', 'open', 'primary', 'required', 'sealed', 'sideways',
- 'singleton', 'slot', 'thread', 'variable', 'virtual'}
+ 'singleton', 'slot', 'thread', 'variable', 'virtual'}
- keywords = {
+ keywords = {
'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
- 'while'}
+ 'while'}
- operators = {
+ operators = {
'~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
- '>', '>=', '&', '|'}
+ '>', '>=', '&', '|'}
- functions = {
+ functions = {
'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
@@ -86,7 +86,7 @@ class DylanLexer(RegexLexer):
'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
- 'vector', 'zero?'}
+ 'vector', 'zero?'}
valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
diff --git a/contrib/python/Pygments/py2/pygments/lexers/elm.py b/contrib/python/Pygments/py2/pygments/lexers/elm.py
index ee941d7dbb..36bc2b5c30 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/elm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/elm.py
@@ -77,7 +77,7 @@ class ElmLexer(RegexLexer):
(words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
# Infix Operators
- (words(builtinOps), Name.Function),
+ (words(builtinOps), Name.Function),
# Numbers
include('numbers'),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/email.py b/contrib/python/Pygments/py2/pygments/lexers/email.py
index 5ad225bb75..3009021518 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/email.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/email.py
@@ -1,154 +1,154 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.email
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for raw E-mail.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
-from pygments.lexers.mime import MIMELexer
-from pygments.token import Text, Keyword, Name, String, Number, Comment
-from pygments.util import get_bool_opt
-
-__all__ = ["EmailLexer"]
-
-
-class EmailHeaderLexer(RegexLexer):
- """
- Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
-
- .. versionadded:: 2.5
- """
-
- def __init__(self, **options):
- super(EmailHeaderLexer, self).__init__(**options)
- self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
-
- def get_x_header_tokens(self, match):
- if self.highlight_x:
- # field
- yield match.start(1), Name.Tag, match.group(1)
-
- # content
- default_actions = self.get_tokens_unprocessed(
- match.group(2), stack=("root", "header"))
- for item in default_actions:
- yield item
- else:
- # lowlight
- yield match.start(1), Comment.Special, match.group(1)
- yield match.start(2), Comment.Multiline, match.group(2)
-
- tokens = {
- "root": [
- (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
- (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
- ],
- "header": [
- # folding
- (r"\n[ \t]", Text.Whitespace),
- (r"\n(?![ \t])", Text.Whitespace, "#pop"),
-
- # keywords
- (r"\bE?SMTPS?\b", Keyword),
- (r"\b(?:HE|EH)LO\b", Keyword),
-
- # mailbox
- (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
- (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
-
- # domain
- (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
-
- # IPv4
- (
- r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
- r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
- Number.Integer,
- ),
-
- # IPv6
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
- (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
- (r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
- (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
- Number.Hex),
- (
- r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
- r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
- r"[0-9])(?=\b)",
- Number.Hex,
- ),
- (
- r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-"
- r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-"
- r"9])(?=\b)",
- Number.Hex,
- ),
-
- # Date time
- (
- r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
- r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
- r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
- r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
- r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
- Name.Decorator,
- ),
-
- # RFC-2047 encoded string
- (
- r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
- r"\]^_`{|}~]+)(\?=)",
- bygroups(
- String.Affix,
- Name.Constant,
- String.Affix,
- Keyword.Constant,
- String.Affix,
- Number.Hex,
- String.Affix
- )
- ),
-
- # others
- (r'[\s]+', Text.Whitespace),
- (r'[\S]', Text),
- ],
- }
-
-
-class EmailLexer(DelegatingLexer):
- """
- Lexer for raw E-mail.
-
- Additional options accepted:
-
- `highlight-X-header`
- Highlight the fields of ``X-`` user-defined email headers. (default:
- ``False``).
-
- .. versionadded:: 2.5
- """
-
- name = "E-mail"
- aliases = ["email", "eml"]
- filenames = ["*.eml"]
- mimetypes = ["message/rfc822"]
-
- def __init__(self, **options):
- super(EmailLexer, self).__init__(
- EmailHeaderLexer, MIMELexer, Comment, **options
- )
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.email
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for raw E-mail.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
+from pygments.lexers.mime import MIMELexer
+from pygments.token import Text, Keyword, Name, String, Number, Comment
+from pygments.util import get_bool_opt
+
+__all__ = ["EmailLexer"]
+
+
+class EmailHeaderLexer(RegexLexer):
+ """
+ Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
+
+ .. versionadded:: 2.5
+ """
+
+ def __init__(self, **options):
+ super(EmailHeaderLexer, self).__init__(**options)
+ self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
+
+ def get_x_header_tokens(self, match):
+ if self.highlight_x:
+ # field
+ yield match.start(1), Name.Tag, match.group(1)
+
+ # content
+ default_actions = self.get_tokens_unprocessed(
+ match.group(2), stack=("root", "header"))
+ for item in default_actions:
+ yield item
+ else:
+ # lowlight
+ yield match.start(1), Comment.Special, match.group(1)
+ yield match.start(2), Comment.Multiline, match.group(2)
+
+ tokens = {
+ "root": [
+ (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
+ (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
+ ],
+ "header": [
+ # folding
+ (r"\n[ \t]", Text.Whitespace),
+ (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+
+ # keywords
+ (r"\bE?SMTPS?\b", Keyword),
+ (r"\b(?:HE|EH)LO\b", Keyword),
+
+ # mailbox
+ (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
+ (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
+
+ # domain
+ (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
+
+ # IPv4
+ (
+ r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
+ r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
+ Number.Integer,
+ ),
+
+ # IPv6
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
+ (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
+ (r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
+ (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
+ Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
+ Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
+ Number.Hex),
+ (
+ r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
+ r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
+ r"[0-9])(?=\b)",
+ Number.Hex,
+ ),
+ (
+ r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-"
+ r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-"
+ r"9])(?=\b)",
+ Number.Hex,
+ ),
+
+ # Date time
+ (
+ r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
+ r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
+ r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
+ r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
+ r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
+ Name.Decorator,
+ ),
+
+ # RFC-2047 encoded string
+ (
+ r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
+ r"\]^_`{|}~]+)(\?=)",
+ bygroups(
+ String.Affix,
+ Name.Constant,
+ String.Affix,
+ Keyword.Constant,
+ String.Affix,
+ Number.Hex,
+ String.Affix
+ )
+ ),
+
+ # others
+ (r'[\s]+', Text.Whitespace),
+ (r'[\S]', Text),
+ ],
+ }
+
+
+class EmailLexer(DelegatingLexer):
+ """
+ Lexer for raw E-mail.
+
+ Additional options accepted:
+
+ `highlight-X-header`
+ Highlight the fields of ``X-`` user-defined email headers. (default:
+ ``False``).
+
+ .. versionadded:: 2.5
+ """
+
+ name = "E-mail"
+ aliases = ["email", "eml"]
+ filenames = ["*.eml"]
+ mimetypes = ["message/rfc822"]
+
+ def __init__(self, **options):
+ super(EmailLexer, self).__init__(
+ EmailHeaderLexer, MIMELexer, Comment, **options
+ )
diff --git a/contrib/python/Pygments/py2/pygments/lexers/erlang.py b/contrib/python/Pygments/py2/pygments/lexers/erlang.py
index 07a46c80ff..39fd05a387 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/erlang.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/erlang.py
@@ -163,7 +163,7 @@ class ErlangShellLexer(Lexer):
filenames = ['*.erl-sh']
mimetypes = ['text/x-erl-shellsession']
- _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
+ _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
def get_tokens_unprocessed(self, text):
erlexer = ErlangLexer(**self.options)
@@ -495,7 +495,7 @@ class ElixirConsoleLexer(Lexer):
aliases = ['iex']
mimetypes = ['text/x-elixir-shellsession']
- _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
+ _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
def get_tokens_unprocessed(self, text):
exlexer = ElixirLexer(**self.options)
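
The two `_prompt_re` patterns in this hunk can be checked directly with the `re` module; the prompt strings below are invented examples::

    import re

    erl_prompt = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
    iex_prompt = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')

    for line in ('1> ok.', '(node@host)2> ok.'):
        m = erl_prompt.match(line)
        print(repr(line), '->', m.group(0) if m else None)

    print(bool(iex_prompt.match('iex(1)> 1 + 1')))
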
diff --git a/contrib/python/Pygments/py2/pygments/lexers/freefem.py b/contrib/python/Pygments/py2/pygments/lexers/freefem.py
index 3e9ac8e840..e4543d2175 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/freefem.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/freefem.py
@@ -36,27 +36,27 @@ class FreeFemLexer(CppLexer):
mimetypes = ['text/x-freefem']
# Language operators
- operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
+ operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
# types
- types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
- 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
- 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
- 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'}
+ types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
+ 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
+ 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
+ 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'}
# finite element spaces
- fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
- 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
- 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
- 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
- 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
- 'RT2', 'RT2Ortho'}
+ fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
+ 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
+ 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
+ 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
+ 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
+ 'RT2', 'RT2Ortho'}
# preprocessor
- preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'}
+ preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'}
# Language keywords
- keywords = {
+ keywords = {
'adj',
'append',
'area',
@@ -169,10 +169,10 @@ class FreeFemLexer(CppLexer):
'x',
'y',
'z'
- }
+ }
# Language shipped functions and class ( )
- functions = {
+ functions = {
'abs',
'acos',
'acosh',
@@ -702,10 +702,10 @@ class FreeFemLexer(CppLexer):
'y0',
'y1',
'yn'
- }
+ }
# function parameters
- parameters = {
+ parameters = {
'A',
'A1',
'abserror',
@@ -849,13 +849,13 @@ class FreeFemLexer(CppLexer):
'WindowIndex',
'which',
'zbound'
- }
+ }
# deprecated
- deprecated = {'fixeborder'}
+ deprecated = {'fixeborder'}
# do not highlight
- suppress_highlight = {
+ suppress_highlight = {
'alignof',
'asm',
'constexpr',
@@ -874,7 +874,7 @@ class FreeFemLexer(CppLexer):
'typeid',
'typename',
'using'
- }
+ }
def get_tokens_unprocessed(self, text):
for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
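
The `get_tokens_unprocessed` override that starts above follows the usual Pygments pattern of reusing a base lexer and re-tagging plain `Name` tokens by set membership. A stripped-down sketch of that pattern, not the actual FreeFem word lists (`MiniLexer` and `MY_TYPES` are hypothetical)::

    from pygments.lexers.c_cpp import CppLexer
    from pygments.token import Keyword, Name

    MY_TYPES = {'mesh', 'fespace'}   # invented word set for the demo

    class MiniLexer(CppLexer):
        name = 'Mini demo'
        aliases = []

        def get_tokens_unprocessed(self, text):
            # Re-tag bare identifiers that belong to the word set.
            for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in MY_TYPES:
                    yield index, Keyword.Type, value
                else:
                    yield index, token, value

    for tok, value in MiniLexer().get_tokens('mesh Th;\n'):
        print(tok, value)
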
diff --git a/contrib/python/Pygments/py2/pygments/lexers/haskell.py b/contrib/python/Pygments/py2/pygments/lexers/haskell.py
index 0c0917e721..a01669c5f8 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/haskell.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/haskell.py
@@ -325,10 +325,10 @@ class AgdaLexer(RegexLexer):
# Identifiers
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
- (u'\\b(Set|Prop)[\u2080-\u2089]*\\b', Keyword.Type),
+ (u'\\b(Set|Prop)[\u2080-\u2089]*\\b', Keyword.Type),
# Special Symbols
(r'(\(|\)|\{|\})', Operator),
- (u'(\\.{1,3}|\\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
+ (u'(\\.{1,3}|\\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
# Numbers
(r'\d+[eE][+-]?\d+', Number.Float),
(r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
@@ -481,10 +481,10 @@ class CryptolLexer(RegexLexer):
],
}
- EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
- 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
- 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
- 'trace'}
+ EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
+ 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
+ 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
+ 'trace'}
def get_tokens_unprocessed(self, text):
stack = ['root']
diff --git a/contrib/python/Pygments/py2/pygments/lexers/haxe.py b/contrib/python/Pygments/py2/pygments/lexers/haxe.py
index b3575080d3..11e190b2de 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/haxe.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/haxe.py
@@ -79,7 +79,7 @@ class HaxeLexer(ExtendedRegexLexer):
if proc in ['error']:
ctx.stack.append('preproc-error')
- yield match.start(), Comment.Preproc, u'#' + proc
+ yield match.start(), Comment.Preproc, u'#' + proc
ctx.pos = match.end()
tokens = {
diff --git a/contrib/python/Pygments/py2/pygments/lexers/hdl.py b/contrib/python/Pygments/py2/pygments/lexers/hdl.py
index b45654ebc8..1e07cfc9c0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/hdl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/hdl.py
@@ -175,75 +175,75 @@ class SystemVerilogLexer(RegexLexer):
(r'`[a-zA-Z_]\w*', Name.Constant),
(words((
- 'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
- 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
- 'before', 'begin', 'bind', 'bins', 'binsof', 'bit', 'break', 'buf',
- 'bufif0', 'bufif1', 'byte', 'case', 'casex', 'casez', 'cell',
- 'chandle', 'checker', 'class', 'clocking', 'cmos', 'config',
- 'const', 'constraint', 'context', 'continue', 'cover', 'covergroup',
- 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design',
- 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
- 'endchecker', 'endclass', 'endclocking', 'endconfig', 'endfunction',
- 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage',
- 'endprimitive', 'endprogram', 'endproperty', 'endsequence',
- 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'eventually',
- 'expect', 'export', 'extends', 'extern', 'final', 'first_match',
- 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function',
- 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff',
- 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'import',
- 'incdir', 'include', 'initial', 'inout', 'input', 'inside',
- 'instance', 'int', 'integer', 'interface', 'intersect', 'join',
- 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
- 'local', 'localparam', 'logic', 'longint', 'macromodule', 'matches',
- 'medium', 'modport', 'module', 'nand', 'negedge', 'new', 'nexttime',
- 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
- 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
- 'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
- 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect',
- 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase',
- 'randsequence', 'rcmos', 'real', 'realtime', 'ref', 'reg',
- 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
- 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually',
- 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence',
- 'shortint', 'shortreal', 'showcancelled', 'signed', 'small', 'solve',
- 'specify', 'specparam', 'static', 'string', 'strong', 'strong0',
- 'strong1', 'struct', 'super', 'supply0', 'supply1', 'sync_accept_on',
- 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
- 'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1',
- 'tri', 'tri0', 'tri1', 'triand', 'trior', 'trireg', 'type',
- 'typedef', 'union', 'unique', 'unique0', 'unsigned', 'until',
- 'until_with', 'untyped', 'use', 'uwire', 'var', 'vectored',
- 'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0',
- 'weak1', 'while', 'wildcard', 'wire', 'with', 'within', 'wor',
- 'xnor', 'xor'), suffix=r'\b'),
+ 'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
+ 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
+ 'before', 'begin', 'bind', 'bins', 'binsof', 'bit', 'break', 'buf',
+ 'bufif0', 'bufif1', 'byte', 'case', 'casex', 'casez', 'cell',
+ 'chandle', 'checker', 'class', 'clocking', 'cmos', 'config',
+ 'const', 'constraint', 'context', 'continue', 'cover', 'covergroup',
+ 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design',
+ 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
+ 'endchecker', 'endclass', 'endclocking', 'endconfig', 'endfunction',
+ 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage',
+ 'endprimitive', 'endprogram', 'endproperty', 'endsequence',
+ 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'eventually',
+ 'expect', 'export', 'extends', 'extern', 'final', 'first_match',
+ 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function',
+ 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff',
+ 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'import',
+ 'incdir', 'include', 'initial', 'inout', 'input', 'inside',
+ 'instance', 'int', 'integer', 'interface', 'intersect', 'join',
+ 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
+ 'local', 'localparam', 'logic', 'longint', 'macromodule', 'matches',
+ 'medium', 'modport', 'module', 'nand', 'negedge', 'new', 'nexttime',
+ 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
+ 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
+ 'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
+ 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect',
+ 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase',
+ 'randsequence', 'rcmos', 'real', 'realtime', 'ref', 'reg',
+ 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
+ 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually',
+ 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence',
+ 'shortint', 'shortreal', 'showcancelled', 'signed', 'small', 'solve',
+ 'specify', 'specparam', 'static', 'string', 'strong', 'strong0',
+ 'strong1', 'struct', 'super', 'supply0', 'supply1', 'sync_accept_on',
+ 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
+ 'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1',
+ 'tri', 'tri0', 'tri1', 'triand', 'trior', 'trireg', 'type',
+ 'typedef', 'union', 'unique', 'unique0', 'unsigned', 'until',
+ 'until_with', 'untyped', 'use', 'uwire', 'var', 'vectored',
+ 'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0',
+ 'weak1', 'while', 'wildcard', 'wire', 'with', 'within', 'wor',
+ 'xnor', 'xor'), suffix=r'\b'),
Keyword),
(words((
- '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
- '`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
- '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
- '`line', '`nounconnected_drive', '`pragma', '`resetall',
- '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
+ '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
+ '`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
+ '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
+ '`line', '`nounconnected_drive', '`pragma', '`resetall',
+ '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
suffix=r'\b'),
Comment.Preproc),
(words((
- '$display', '$displayb', '$displayh', '$displayo', '$dumpall',
- '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon',
- '$dumpports', '$dumpportsall', '$dumpportsflush', '$dumpportslimit',
- '$dumpportsoff', '$dumpportson', '$dumpvars', '$fclose',
- '$fdisplay', '$fdisplayb', '$fdisplayh', '$fdisplayo', '$feof',
- '$ferror', '$fflush', '$fgetc', '$fgets', '$finish', '$fmonitor',
- '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen', '$fread',
- '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh',
+ '$display', '$displayb', '$displayh', '$displayo', '$dumpall',
+ '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon',
+ '$dumpports', '$dumpportsall', '$dumpportsflush', '$dumpportslimit',
+ '$dumpportsoff', '$dumpportson', '$dumpvars', '$fclose',
+ '$fdisplay', '$fdisplayb', '$fdisplayh', '$fdisplayo', '$feof',
+ '$ferror', '$fflush', '$fgetc', '$fgets', '$finish', '$fmonitor',
+ '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen', '$fread',
+ '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh',
'$fstrobeo', '$ftell', '$fwrite', '$fwriteb', '$fwriteh', '$fwriteo',
'$monitor', '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
- '$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh',
- '$rewind', '$sformat', '$sformatf', '$sscanf', '$strobe',
- '$strobeb', '$strobeh', '$strobeo', '$swrite', '$swriteb',
- '$swriteh', '$swriteo', '$test', '$ungetc', '$value$plusargs',
- '$write', '$writeb', '$writeh', '$writememb', '$writememh',
- '$writeo'), suffix=r'\b'),
+ '$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh',
+ '$rewind', '$sformat', '$sformatf', '$sscanf', '$strobe',
+ '$strobeb', '$strobeh', '$strobeo', '$swrite', '$swriteb',
+ '$swriteh', '$swriteo', '$test', '$ungetc', '$value$plusargs',
+ '$write', '$writeb', '$writeh', '$writememb', '$writememh',
+ '$writeo'), suffix=r'\b'),
Name.Builtin),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
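
A note on the `words()` helper that dominates this hunk: it turns a tuple of literal words into a single optimized alternation with the given prefix and suffix. A tiny self-contained illustration (`KwLexer` is an invented demo, not the SystemVerilog lexer)::

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Punctuation, Text

    class KwLexer(RegexLexer):
        tokens = {
            'root': [
                # suffix=r'\b' keeps 'module' from matching inside e.g. 'modules'
                (words(('module', 'endmodule', 'wire'), suffix=r'\b'), Keyword),
                (r'\w+', Name),
                (r'[;]', Punctuation),
                (r'\s+', Text),
            ],
        }

    print(list(KwLexer().get_tokens('module top; wire w; endmodule\n')))
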
diff --git a/contrib/python/Pygments/py2/pygments/lexers/html.py b/contrib/python/Pygments/py2/pygments/lexers/html.py
index cbef4f7e98..3b3fe329f4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/html.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/html.py
@@ -244,7 +244,7 @@ class XsltLexer(XmlLexer):
filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
mimetypes = ['application/xsl+xml', 'application/xslt+xml']
- EXTRA_KEYWORDS = {
+ EXTRA_KEYWORDS = {
'apply-imports', 'apply-templates', 'attribute',
'attribute-set', 'call-template', 'choose', 'comment',
'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
@@ -253,7 +253,7 @@ class XsltLexer(XmlLexer):
'preserve-space', 'processing-instruction', 'sort',
'strip-space', 'stylesheet', 'template', 'text', 'transform',
'value-of', 'variable', 'when', 'with-param'
- }
+ }
def get_tokens_unprocessed(self, text):
for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
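
`XsltLexer` builds on `XmlLexer` and gives the element names listed above special treatment; like any registered lexer it can be looked up by mimetype. A short sketch (the XSLT snippet is invented)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_for_mimetype

    xslt = ('<xsl:stylesheet version="1.0">'
            '<xsl:template match="/"/></xsl:stylesheet>')
    lexer = get_lexer_for_mimetype('application/xslt+xml')
    print(lexer.name)  # the lexer's display name
    print(highlight(xslt, lexer, TerminalFormatter()))
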
diff --git a/contrib/python/Pygments/py2/pygments/lexers/javascript.py b/contrib/python/Pygments/py2/pygments/lexers/javascript.py
index e9cf672278..e701860e91 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/javascript.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/javascript.py
@@ -372,7 +372,7 @@ class DartLexer(RegexLexer):
(r'\b(bool|double|dynamic|int|num|Object|String|void)\b', Keyword.Type),
(r'\b(false|null|true)\b', Keyword.Constant),
(r'[~!%^&*+=|?:<>/-]|as\b', Operator),
- (r'@[a-zA-Z_$]\w*', Name.Decorator),
+ (r'@[a-zA-Z_$]\w*', Name.Decorator),
(r'[a-zA-Z_$]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[(){}\[\],.;]', Punctuation),
@@ -1457,20 +1457,20 @@ class EarlGreyLexer(RegexLexer):
(r'8r[0-7]+', Number.Oct),
(r'2r[01]+', Number.Bin),
(r'16r[a-fA-F0-9]+', Number.Hex),
- (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
- Number.Radix),
+ (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
+ Number.Radix),
(r'\d+', Number.Integer)
],
}
-
+
class JuttleLexer(RegexLexer):
"""
For `Juttle`_ source code.
.. _Juttle: https://github.com/juttle/juttle
- .. versionadded:: 2.2
+ .. versionadded:: 2.2
"""
name = 'Juttle'
@@ -1505,24 +1505,24 @@ class JuttleLexer(RegexLexer):
r'(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
(r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?'
r'(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
- (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|'
- r'day|week|month|year)[s]?'
- r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|'
- r'day|week|month|year)[s]?)'
+ (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|'
+ r'day|week|month|year)[s]?'
+ r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|'
+ r'day|week|month|year)[s]?)'
r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
- (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration,
- 'slashstartsregex'),
+ (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration,
+ 'slashstartsregex'),
(r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
- r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b',
- Keyword.Reserved),
+ r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b',
+ Keyword.Reserved),
(r'(true|false|null|Infinity)\b', Keyword.Constant),
- (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b',
- Name.Builtin),
+ (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b',
+ Name.Builtin),
(JS_IDENT, Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/jvm.py b/contrib/python/Pygments/py2/pygments/lexers/jvm.py
index 5588b79660..cabb7f909e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/jvm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/jvm.py
@@ -26,7 +26,7 @@ __all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
class JavaLexer(RegexLexer):
"""
- For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
+ For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
"""
name = 'Java'
@@ -50,7 +50,7 @@ class JavaLexer(RegexLexer):
(r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
r'((?:[^\W\d]|\$)[\w$]*)' # method name
r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Punctuation)),
+ bygroups(using(this), Name.Function, Text, Punctuation)),
(r'@[^\W\d][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
@@ -61,14 +61,14 @@ class JavaLexer(RegexLexer):
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
- (r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
- 'var'),
+ (r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'var'),
(r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
- Name.Attribute)),
+ (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
+ Name.Attribute)),
(r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
(r'([^\W\d]|\$)[\w$]*', Name),
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
@@ -83,16 +83,16 @@ class JavaLexer(RegexLexer):
(r'0[bB][01][01_]*[lL]?', Number.Bin),
(r'0[0-7_]+[lL]?', Number.Oct),
(r'0|[1-9][0-9_]*[lL]?', Number.Integer),
- (r'[~^*!%&\[\]<>|+=/?-]', Operator),
- (r'[{}();:.,]', Punctuation),
+ (r'[~^*!%&\[\]<>|+=/?-]', Operator),
+ (r'[{}();:.,]', Punctuation),
(r'\n', Text)
],
'class': [
(r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
],
- 'var': [
- (r'([^\W\d]|\$)[\w$]*', Name, '#pop')
- ],
+ 'var': [
+ (r'([^\W\d]|\$)[\w$]*', Name, '#pop')
+ ],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
@@ -111,7 +111,7 @@ class AspectJLexer(JavaLexer):
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
- aj_keywords = {
+ aj_keywords = {
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
@@ -121,9 +121,9 @@ class AspectJLexer(JavaLexer):
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
- }
- aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
- aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
+ }
+ aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
+ aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
@@ -271,7 +271,7 @@ class ScalaLexer(RegexLexer):
# method names
(r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
(r'[^\S\n]+', Text),
- include('comments'),
+ include('comments'),
(u'@%s' % idrest, Name.Decorator),
(u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
@@ -306,17 +306,17 @@ class ScalaLexer(RegexLexer):
],
'class': [
(u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op),
- bygroups(Name.Class, Text, Operator), ('#pop', 'typeparam')),
+ bygroups(Name.Class, Text, Operator), ('#pop', 'typeparam')),
(r'\s+', Text),
- include('comments'),
+ include('comments'),
(r'\{', Operator, '#pop'),
(r'\(', Operator, '#pop'),
(u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
],
'type': [
(r'\s+', Text),
- include('comments'),
- (r'<[%:]|>:|[#_]|\bforSome\b|\btype\b', Keyword),
+ include('comments'),
+ (r'<[%:]|>:|[#_]|\bforSome\b|\btype\b', Keyword),
(u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'),
(r'[({]', Operator, '#push'),
(u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' %
@@ -328,18 +328,18 @@ class ScalaLexer(RegexLexer):
(u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'typeparam': [
- (r'\s+', Text),
- include('comments'),
- (r',+', Punctuation),
- (u'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword),
+ (r'\s+', Text),
+ include('comments'),
+ (r',+', Punctuation),
+ (u'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword),
(r'([\])}])', Operator, '#pop'),
(r'[(\[{]', Operator, '#push'),
(u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
- 'comments': [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
+ 'comments': [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ ],
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
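
The Scala hunk above moves the shared 'comments' state and the nested-comment handling around; the same `include()` / `'#push'` / `'#pop'` machinery can be shown in isolation (`NestedCommentLexer` is an invented demo)::

    from pygments.lexer import RegexLexer, include
    from pygments.token import Comment, Text

    class NestedCommentLexer(RegexLexer):
        tokens = {
            'root': [
                include('comments'),
                (r'[^/]+', Text),
                (r'/', Text),
            ],
            'comments': [
                (r'//.*?\n', Comment.Single),
                (r'/\*', Comment.Multiline, 'comment'),
            ],
            'comment': [
                (r'[^/*]+', Comment.Multiline),
                (r'/\*', Comment.Multiline, '#push'),   # nested open: push again
                (r'\*/', Comment.Multiline, '#pop'),    # close: pop one level
                (r'[*/]', Comment.Multiline),
            ],
        }

    print(list(NestedCommentLexer().get_tokens('a /* x /* y */ z */ b\n')))
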
diff --git a/contrib/python/Pygments/py2/pygments/lexers/lisp.py b/contrib/python/Pygments/py2/pygments/lexers/lisp.py
index 601d5a5f27..d7f5b79a6a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/lisp.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/lisp.py
@@ -1554,7 +1554,7 @@ class EmacsLispLexer(RegexLexer):
# Take a deep breath...
symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
- macros = {
+ macros = {
'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
@@ -1601,17 +1601,17 @@ class EmacsLispLexer(RegexLexer):
'with-tramp-file-property', 'with-tramp-progress-reporter',
'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
'return-from',
- }
+ }
- special_forms = {
+ special_forms = {
'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
'save-restriction', 'setq', 'setq-default', 'subr-arity',
'unwind-protect', 'while',
- }
+ }
- builtin_function = {
+ builtin_function = {
'%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
'Snarf-documentation', 'abort-recursive-edit', 'abs',
'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
@@ -1937,9 +1937,9 @@ class EmacsLispLexer(RegexLexer):
'split-window-internal', 'sqrt', 'standard-case-table',
'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
- 'string=', 'string<', 'string>', 'string-as-multibyte',
- 'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
- 'string-collate-lessp', 'string-equal', 'string-greaterp',
+ 'string=', 'string<', 'string>', 'string-as-multibyte',
+ 'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
+ 'string-collate-lessp', 'string-equal', 'string-greaterp',
'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
'string-match', 'string-to-char', 'string-to-multibyte',
'string-to-number', 'string-to-syntax', 'string-to-unibyte',
@@ -2051,23 +2051,23 @@ class EmacsLispLexer(RegexLexer):
'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
'forward-point',
- }
+ }
- builtin_function_highlighted = {
+ builtin_function_highlighted = {
'defvaralias', 'provide', 'require',
'with-no-warnings', 'define-widget', 'with-electric-help',
'throw', 'defalias', 'featurep'
- }
+ }
- lambda_list_keywords = {
+ lambda_list_keywords = {
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
- }
+ }
- error_keywords = {
+ error_keywords = {
'cl-assert', 'cl-check-type', 'error', 'signal',
'user-error', 'warn',
- }
+ }
def get_tokens_unprocessed(self, text):
stack = ['root']
@@ -2226,7 +2226,7 @@ class ShenLexer(RegexLexer):
BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '<e>', '<!>')
- MAPPINGS = {s: Keyword for s in DECLARATIONS}
+ MAPPINGS = {s: Keyword for s in DECLARATIONS}
MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/markup.py b/contrib/python/Pygments/py2/pygments/lexers/markup.py
index ad2491ad1f..5901025f14 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/markup.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/markup.py
@@ -222,9 +222,9 @@ class RstLexer(RegexLexer):
Punctuation, Text, using(this, state='inline'))),
# Comments
(r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
- # Field list marker
- (r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
- bygroups(Text, Name.Class, Text)),
+ # Field list marker
+ (r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
+ bygroups(Text, Name.Class, Text)),
# Definition list
(r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
bygroups(using(this, state='inline'), using(this, state='inline'))),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/matlab.py b/contrib/python/Pygments/py2/pygments/lexers/matlab.py
index 64dd3a3c2d..6ff588c61b 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/matlab.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/matlab.py
@@ -72,8 +72,8 @@ class MatlabLexer(RegexLexer):
"hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
"wilkinson")
- _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\'
-
+ _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\'
+
tokens = {
'root': [
# line starting with '!' is sent as a system command. not sure what
@@ -81,17 +81,17 @@ class MatlabLexer(RegexLexer):
(r'^!.*', String.Other),
(r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
(r'%.*$', Comment),
- (r'^\s*function\b', Keyword, 'deffunc'),
+ (r'^\s*function\b', Keyword, 'deffunc'),
# from 'iskeyword' on version 7.11 (R2010):
- # Check that there is no preceding dot, as keywords are valid field
- # names.
- (words(('break', 'case', 'catch', 'classdef', 'continue', 'else',
- 'elseif', 'end', 'enumerated', 'events', 'for', 'function',
- 'global', 'if', 'methods', 'otherwise', 'parfor',
- 'persistent', 'properties', 'return', 'spmd', 'switch',
- 'try', 'while'),
- prefix=r'(?<!\.)', suffix=r'\b'),
+ # Check that there is no preceding dot, as keywords are valid field
+ # names.
+ (words(('break', 'case', 'catch', 'classdef', 'continue', 'else',
+ 'elseif', 'end', 'enumerated', 'events', 'for', 'function',
+ 'global', 'if', 'methods', 'otherwise', 'parfor',
+ 'persistent', 'properties', 'return', 'spmd', 'switch',
+ 'try', 'while'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
Keyword),
("(" + "|".join(elfun + specfun + elmat) + r')\b', Name.Builtin),
@@ -99,23 +99,23 @@ class MatlabLexer(RegexLexer):
# line continuation with following comment:
(r'\.\.\..*$', Comment),
- # command form:
- # "How MATLAB Recognizes Command Syntax" specifies that an operator
- # is recognized if it is either surrounded by spaces or by no
- # spaces on both sides; only the former case matters for us. (This
- # allows distinguishing `cd ./foo` from `cd ./ foo`.)
- (r'(?:^|(?<=;))\s*\w+\s+(?!=|\(|(%s)\s+)' % _operators, Name,
- 'commandargs'),
-
+ # command form:
+ # "How MATLAB Recognizes Command Syntax" specifies that an operator
+ # is recognized if it is either surrounded by spaces or by no
+ # spaces on both sides; only the former case matters for us. (This
+ # allows distinguishing `cd ./foo` from `cd ./ foo`.)
+ (r'(?:^|(?<=;))\s*\w+\s+(?!=|\(|(%s)\s+)' % _operators, Name,
+ 'commandargs'),
+
# operators:
- (_operators, Operator),
-
- # numbers (must come before punctuation to handle `.5`; cannot use
- # `\b` due to e.g. `5. + .5`).
- (r'(?<!\w)((\d+\.\d*)|(\d*\.\d+))([eEf][+-]?\d+)?(?!\w)', Number.Float),
- (r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
- (r'\b\d+\b', Number.Integer),
-
+ (_operators, Operator),
+
+ # numbers (must come before punctuation to handle `.5`; cannot use
+ # `\b` due to e.g. `5. + .5`).
+ (r'(?<!\w)((\d+\.\d*)|(\d*\.\d+))([eEf][+-]?\d+)?(?!\w)', Number.Float),
+ (r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
+ (r'\b\d+\b', Number.Integer),
+
# punctuation:
(r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
(r'=|:|;', Punctuation),
@@ -124,7 +124,7 @@ class MatlabLexer(RegexLexer):
# (not great, but handles common cases...)
(r'(?<=[\w)\].])\'+', Operator),
- (r'"(""|[^"])*"', String),
+ (r'"(""|[^"])*"', String),
(r'(?<![\w)\].])\'', String, 'string'),
(r'[a-zA-Z_]\w*', Name),
@@ -143,28 +143,28 @@ class MatlabLexer(RegexLexer):
# function with no args
(r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'commandargs': [
- ("'[^']*'", String),
- ("[^';\n]+", String),
- (";?\n?", Punctuation, '#pop'),
- ]
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ ],
+ 'commandargs': [
+ ("'[^']*'", String),
+ ("[^';\n]+", String),
+ (";?\n?", Punctuation, '#pop'),
+ ]
}
def analyse_text(text):
- # function declaration.
- first_non_comment = next((line for line in text.splitlines()
- if not re.match(r'^\s*%', text)), '').strip()
- if (first_non_comment.startswith('function')
- and '{' not in first_non_comment):
- return 1.
- # comment
- elif re.match(r'^\s*%', text, re.M):
+ # function declaration.
+ first_non_comment = next((line for line in text.splitlines()
+ if not re.match(r'^\s*%', text)), '').strip()
+ if (first_non_comment.startswith('function')
+ and '{' not in first_non_comment):
+ return 1.
+ # comment
+ elif re.match(r'^\s*%', text, re.M):
return 0.2
- # system cmd
- elif re.match(r'^!\w+', text, re.M):
+ # system cmd
+ elif re.match(r'^!\w+', text, re.M):
return 0.2
@@ -564,7 +564,7 @@ class OctaveLexer(RegexLexer):
'root': [
# We should look into multiline comments
(r'[%#].*$', Comment),
- (r'^\s*function\b', Keyword, 'deffunc'),
+ (r'^\s*function\b', Keyword, 'deffunc'),
# from 'iskeyword' on hg changeset 8cc154f45e37
(words((
@@ -637,7 +637,7 @@ class ScilabLexer(RegexLexer):
tokens = {
'root': [
(r'//.*?$', Comment.Single),
- (r'^\s*function\b', Keyword, 'deffunc'),
+ (r'^\s*function\b', Keyword, 'deffunc'),
(words((
'__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
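
The `analyse_text()` heuristics in this hunk (returning 1.0 or 0.2) feed `pygments.lexers.guess_lexer`, which picks the highest-scoring lexer for unlabeled input. A quick sketch; the snippet is invented and the winner depends on all registered lexers' scores::

    from pygments.lexers import guess_lexer

    snippet = "function y = f(x)\n  y = x + 1;\nend\n"
    print(guess_lexer(snippet).name)
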
diff --git a/contrib/python/Pygments/py2/pygments/lexers/mime.py b/contrib/python/Pygments/py2/pygments/lexers/mime.py
index 95979f35c5..aadfdc9c7a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/mime.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/mime.py
@@ -1,226 +1,226 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.mime
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Multipurpose Internet Mail Extensions (MIME) data.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.lexers import get_lexer_for_mimetype
-from pygments.token import Text, Name, String, Operator, Comment, Other
-from pygments.util import get_int_opt, ClassNotFound
-
-__all__ = ["MIMELexer"]
-
-
-class MIMELexer(RegexLexer):
- """
- Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
- designed to process nested multipart data.
-
- It assumes that the given data contains both header and body (separated by
- an empty line). If no valid header is found, the entire data is treated as
- the body.
-
- Additional options accepted:
-
- `MIME-max-level`
- Maximum recursion level for nested MIME structures. Any negative
- number is treated as unlimited. (default: -1)
-
- `Content-Type`
- Treat the data as the given content type. Useful when the header is
- missing; otherwise this lexer tries to parse the type from the header.
- (default: `text/plain`)
-
- `Multipart-Boundary`
- Set the default multipart boundary delimiter. This option is only used
- when `Content-Type` is `multipart` and the header is missing; by default
- this lexer tries to parse the boundary from the header. (default: None)
-
- `Content-Transfer-Encoding`
- Treat the data as the given transfer encoding; by default this lexer
- tries to parse it from the header. (default: None)
-
- .. versionadded:: 2.5
- """
-
- name = "MIME"
- aliases = ["mime"]
- mimetypes = ["multipart/mixed",
- "multipart/related",
- "multipart/alternative"]
-
- def __init__(self, **options):
- super(MIMELexer, self).__init__(**options)
- self.boundary = options.get("Multipart-Boundary")
- self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
- self.content_type = options.get("Content_Type", "text/plain")
- self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
-
- def analyse_text(text):
- try:
- header, body = text.strip().split("\n\n", 1)
- if not body.strip():
- return 0.1
-
- invalid_headers = MIMELexer.tokens["header"].sub("", header)
- if invalid_headers.strip():
- return 0.1
- else:
- return 1
-
- except ValueError:
- return 0.1
-
- def get_header_tokens(self, match):
- field = match.group(1)
-
- if field.lower() in self.attention_headers:
- yield match.start(1), Name.Tag, field + ":"
- yield match.start(2), Text.Whitespace, match.group(2)
-
- pos = match.end(2)
- body = match.group(3)
- for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
- yield pos + i, t, v
-
- else:
- yield match.start(), Comment, match.group()
-
- def get_body_tokens(self, match):
- pos_body_start = match.start()
- entire_body = match.group()
-
- # skip first newline
- if entire_body[0] == '\n':
- yield pos_body_start, Text.Whitespace, u'\n'
- pos_body_start = pos_body_start + 1
- entire_body = entire_body[1:]
-
- # if it is not a multipart message
- if not self.content_type.startswith("multipart") or not self.boundary:
- for i, t, v in self.get_bodypart_tokens(entire_body):
- yield pos_body_start + i, t, v
- return
-
- # find boundary
- bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
- bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
-
- # some data has prefix text before first boundary
- m = bdry_matcher.search(entire_body)
- if m:
- pos_part_start = pos_body_start + m.end()
- pos_iter_start = lpos_end = m.end()
- yield pos_body_start, Text, entire_body[:m.start()]
- yield pos_body_start + lpos_end, String.Delimiter, m.group()
- else:
- pos_part_start = pos_body_start
- pos_iter_start = 0
-
- # process tokens of each body part
- for m in bdry_matcher.finditer(entire_body, pos_iter_start):
- # bodypart
- lpos_start = pos_part_start - pos_body_start
- lpos_end = m.start()
- part = entire_body[lpos_start:lpos_end]
- for i, t, v in self.get_bodypart_tokens(part):
- yield pos_part_start + i, t, v
-
- # boundary
- yield pos_body_start + lpos_end, String.Delimiter, m.group()
- pos_part_start = pos_body_start + m.end()
-
- # some data has suffix text after last boundary
- lpos_start = pos_part_start - pos_body_start
- if lpos_start != len(entire_body):
- yield pos_part_start, Text, entire_body[lpos_start:]
-
- def get_bodypart_tokens(self, text):
- # return if:
- # * no content
- # * no content type specified
- # * content encoding is not readable
- # * max recursion level exceeded
- if not text.strip() or not self.content_type:
- return [(0, Other, text)]
-
- cte = self.content_transfer_encoding
- if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
- return [(0, Other, text)]
-
- if self.max_nested_level == 0:
- return [(0, Other, text)]
-
- # get lexer
- try:
- lexer = get_lexer_for_mimetype(self.content_type)
- except ClassNotFound:
- return [(0, Other, text)]
-
- if isinstance(lexer, type(self)):
- lexer.max_nested_level = self.max_nested_level - 1
-
- return lexer.get_tokens_unprocessed(text)
-
- def store_content_type(self, match):
- self.content_type = match.group(1)
-
- prefix_len = match.start(1) - match.start(0)
- yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
- yield match.start(1), Name.Label, match.group(2)
- yield match.end(2), String.Delimiter, u"/"
- yield match.start(3), Name.Label, match.group(3)
-
- def get_content_type_subtokens(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Text.Whitespace, match.group(2)
- yield match.start(3), Name.Attribute, match.group(3)
- yield match.start(4), Operator, match.group(4)
- yield match.start(5), String, match.group(5)
-
- if match.group(3).lower() == "boundary":
- boundary = match.group(5).strip()
- if boundary[0] == '"' and boundary[-1] == '"':
- boundary = boundary[1:-1]
- self.boundary = boundary
-
- def store_content_transfer_encoding(self, match):
- self.content_transfer_encoding = match.group(0).lower()
- yield match.start(0), Name.Constant, match.group(0)
-
- attention_headers = {"content-type", "content-transfer-encoding"}
-
- tokens = {
- "root": [
- (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
- (r"^$[\s\S]+", get_body_tokens),
- ],
- "header": [
- # folding
- (r"\n[ \t]", Text.Whitespace),
- (r"\n(?![ \t])", Text.Whitespace, "#pop"),
- ],
- "content-type": [
- include("header"),
- (
- r"^\s*((multipart|application|audio|font|image|model|text|video"
- r"|message)/([\w-]+))",
- store_content_type,
- ),
- (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
- get_content_type_subtokens),
- (r';[ \t]*\n(?![ \t])', Text, '#pop'),
- ],
- "content-transfer-encoding": [
- include("header"),
- (r"([\w-]+)", store_content_transfer_encoding),
- ],
- }
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.mime
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Multipurpose Internet Mail Extensions (MIME) data.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.lexers import get_lexer_for_mimetype
+from pygments.token import Text, Name, String, Operator, Comment, Other
+from pygments.util import get_int_opt, ClassNotFound
+
+__all__ = ["MIMELexer"]
+
+
+class MIMELexer(RegexLexer):
+ """
+ Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
+ designed to process nested multipart data.
+
+ It assumes that the given data contains both a header and a body (separated
+ by an empty line). If no valid header is found, the entire data is treated
+ as the body.
+
+ Additional options accepted:
+
+ `MIME-max-level`
+ Maximum recursion level for nested MIME structures. Any negative number
+ is treated as unlimited. (default: -1)
+
+ `Content-Type`
+ Treat the data as the given content type. Useful when the header is
+ missing; otherwise this lexer tries to parse the content type from the
+ header. (default: `text/plain`)
+
+ `Multipart-Boundary`
+ Set the default multipart boundary delimiter. This option is only used
+ when `Content-Type` is `multipart` and the header is missing; by default
+ this lexer tries to parse the boundary from the header. (default: None)
+
+ `Content-Transfer-Encoding`
+ Treat the data as having the given transfer encoding; by default this
+ lexer tries to parse the encoding from the header. (default: None)
+
+ .. versionadded:: 2.5
+ """
+
+ name = "MIME"
+ aliases = ["mime"]
+ mimetypes = ["multipart/mixed",
+ "multipart/related",
+ "multipart/alternative"]
+
+ def __init__(self, **options):
+ super(MIMELexer, self).__init__(**options)
+ self.boundary = options.get("Multipart-Boundary")
+ self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
+ self.content_type = options.get("Content_Type", "text/plain")
+ self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
+
+ def analyse_text(text):
+ try:
+ header, body = text.strip().split("\n\n", 1)
+ if not body.strip():
+ return 0.1
+
+ invalid_headers = MIMELexer.tokens["header"].sub("", header)
+ if invalid_headers.strip():
+ return 0.1
+ else:
+ return 1
+
+ except ValueError:
+ return 0.1
+
+ def get_header_tokens(self, match):
+ field = match.group(1)
+
+ if field.lower() in self.attention_headers:
+ yield match.start(1), Name.Tag, field + ":"
+ yield match.start(2), Text.Whitespace, match.group(2)
+
+ pos = match.end(2)
+ body = match.group(3)
+ for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
+ yield pos + i, t, v
+
+ else:
+ yield match.start(), Comment, match.group()
+
+ def get_body_tokens(self, match):
+ pos_body_start = match.start()
+ entire_body = match.group()
+
+ # skip first newline
+ if entire_body[0] == '\n':
+ yield pos_body_start, Text.Whitespace, u'\n'
+ pos_body_start = pos_body_start + 1
+ entire_body = entire_body[1:]
+
+ # if it is not a multipart
+ if not self.content_type.startswith("multipart") or not self.boundary:
+ for i, t, v in self.get_bodypart_tokens(entire_body):
+ yield pos_body_start + i, t, v
+ return
+
+ # find boundary
+ bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
+ bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
+
+ # some data has prefix text before first boundary
+ m = bdry_matcher.search(entire_body)
+ if m:
+ pos_part_start = pos_body_start + m.end()
+ pos_iter_start = lpos_end = m.end()
+ yield pos_body_start, Text, entire_body[:m.start()]
+ yield pos_body_start + lpos_end, String.Delimiter, m.group()
+ else:
+ pos_part_start = pos_body_start
+ pos_iter_start = 0
+
+ # process tokens of each body part
+ for m in bdry_matcher.finditer(entire_body, pos_iter_start):
+ # bodypart
+ lpos_start = pos_part_start - pos_body_start
+ lpos_end = m.start()
+ part = entire_body[lpos_start:lpos_end]
+ for i, t, v in self.get_bodypart_tokens(part):
+ yield pos_part_start + i, t, v
+
+ # boundary
+ yield pos_body_start + lpos_end, String.Delimiter, m.group()
+ pos_part_start = pos_body_start + m.end()
+
+ # some data has suffix text after last boundary
+ lpos_start = pos_part_start - pos_body_start
+ if lpos_start != len(entire_body):
+ yield pos_part_start, Text, entire_body[lpos_start:]
+
+ def get_bodypart_tokens(self, text):
+ # return if:
+ # * no content
+ # * no content type specified
+ # * content encoding is not readable
+ # * max recursion level exceeded
+ if not text.strip() or not self.content_type:
+ return [(0, Other, text)]
+
+ cte = self.content_transfer_encoding
+ if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
+ return [(0, Other, text)]
+
+ if self.max_nested_level == 0:
+ return [(0, Other, text)]
+
+ # get lexer
+ try:
+ lexer = get_lexer_for_mimetype(self.content_type)
+ except ClassNotFound:
+ return [(0, Other, text)]
+
+ if isinstance(lexer, type(self)):
+ lexer.max_nested_level = self.max_nested_level - 1
+
+ return lexer.get_tokens_unprocessed(text)
+
+ def store_content_type(self, match):
+ self.content_type = match.group(1)
+
+ prefix_len = match.start(1) - match.start(0)
+ yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
+ yield match.start(1), Name.Label, match.group(2)
+ yield match.end(2), String.Delimiter, u"/"
+ yield match.start(3), Name.Label, match.group(3)
+
+ def get_content_type_subtokens(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Text.Whitespace, match.group(2)
+ yield match.start(3), Name.Attribute, match.group(3)
+ yield match.start(4), Operator, match.group(4)
+ yield match.start(5), String, match.group(5)
+
+ if match.group(3).lower() == "boundary":
+ boundary = match.group(5).strip()
+ if boundary[0] == '"' and boundary[-1] == '"':
+ boundary = boundary[1:-1]
+ self.boundary = boundary
+
+ def store_content_transfer_encoding(self, match):
+ self.content_transfer_encoding = match.group(0).lower()
+ yield match.start(0), Name.Constant, match.group(0)
+
+ attention_headers = {"content-type", "content-transfer-encoding"}
+
+ tokens = {
+ "root": [
+ (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
+ (r"^$[\s\S]+", get_body_tokens),
+ ],
+ "header": [
+ # folding
+ (r"\n[ \t]", Text.Whitespace),
+ (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+ ],
+ "content-type": [
+ include("header"),
+ (
+ r"^\s*((multipart|application|audio|font|image|model|text|video"
+ r"|message)/([\w-]+))",
+ store_content_type,
+ ),
+ (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
+ get_content_type_subtokens),
+ (r';[ \t]*\n(?![ \t])', Text, '#pop'),
+ ],
+ "content-transfer-encoding": [
+ include("header"),
+ (r"([\w-]+)", store_content_transfer_encoding),
+ ],
+ }
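The MIMELexer re-added above is configured entirely through constructor options; note that the constructor reads "Content_Type" and "Content_Transfer_Encoding" (underscores), while the docstring spells them with hyphens. A minimal usage sketch against the public Pygments API (highlight, TerminalFormatter), passing the option keys exactly as the constructor looks them up; the sample message and the "frontier" boundary are made up for illustration:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.mime import MIMELexer

# A made-up two-part message; "frontier" matches the boundary option below.
data = (
    "MIME-Version: 1.0\n"
    "Content-Type: multipart/mixed; boundary=frontier\n"
    "\n"
    "--frontier\n"
    "Content-Type: text/plain\n"
    "\n"
    "hello from the plain-text part\n"
    "--frontier--\n"
)

# Hyphenated option keys cannot be keyword arguments, so unpack a dict.
lexer = MIMELexer(**{
    "MIME-max-level": 2,               # limit recursion into nested parts
    "Multipart-Boundary": "frontier",  # fallback if the header has no boundary
})
print(highlight(data, lexer, TerminalFormatter()))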
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ml.py b/contrib/python/Pygments/py2/pygments/lexers/ml.py
index 461af88134..34d6c3e4ef 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ml.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ml.py
@@ -30,7 +30,7 @@ class SMLLexer(RegexLexer):
filenames = ['*.sml', '*.sig', '*.fun']
mimetypes = ['text/x-standardml', 'application/x-standardml']
- alphanumid_reserved = {
+ alphanumid_reserved = {
# Core
'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
@@ -39,16 +39,16 @@ class SMLLexer(RegexLexer):
# Modules
'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
'struct', 'structure', 'where',
- }
+ }
- symbolicid_reserved = {
+ symbolicid_reserved = {
# Core
':', r'\|', '=', '=>', '->', '#',
# Modules
':>',
- }
+ }
- nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
+ nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
alphanumid_re = r"[a-zA-Z][\w']*"
symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
diff --git a/contrib/python/Pygments/py2/pygments/lexers/modeling.py b/contrib/python/Pygments/py2/pygments/lexers/modeling.py
index f4dca4a905..f7336a445f 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/modeling.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/modeling.py
@@ -62,8 +62,8 @@ class ModelicaLexer(RegexLexer):
r'transpose|vector|zeros)\b', Name.Builtin),
(r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
- r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
- r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
+ r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
+ r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
r'output|parameter|partial|protected|public|pure|redeclare|'
r'replaceable|return|stream|then|when|while)\b',
Keyword.Reserved),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/pascal.py b/contrib/python/Pygments/py2/pygments/lexers/pascal.py
index 0a8dd7df4d..c554b4a915 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/pascal.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/pascal.py
@@ -68,29 +68,29 @@ class DelphiLexer(Lexer):
'dispose', 'exit', 'false', 'new', 'true'
)
- BLOCK_KEYWORDS = {
+ BLOCK_KEYWORDS = {
'begin', 'class', 'const', 'constructor', 'destructor', 'end',
'finalization', 'function', 'implementation', 'initialization',
'label', 'library', 'operator', 'procedure', 'program', 'property',
'record', 'threadvar', 'type', 'unit', 'uses', 'var'
- }
+ }
- FUNCTION_MODIFIERS = {
+ FUNCTION_MODIFIERS = {
'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
'override', 'assembler'
- }
+ }
# XXX: those aren't global. but currently we know no way for defining
# them just for the type context.
- DIRECTIVES = {
+ DIRECTIVES = {
'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
'published', 'public'
- }
+ }
- BUILTIN_TYPES = {
+ BUILTIN_TYPES = {
'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
@@ -104,7 +104,7 @@ class DelphiLexer(Lexer):
'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
'widechar', 'widestring', 'word', 'wordbool'
- }
+ }
BUILTIN_UNITS = {
'System': (
@@ -246,7 +246,7 @@ class DelphiLexer(Lexer):
)
}
- ASM_REGISTERS = {
+ ASM_REGISTERS = {
'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
@@ -255,9 +255,9 @@ class DelphiLexer(Lexer):
'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
'xmm6', 'xmm7'
- }
+ }
- ASM_INSTRUCTIONS = {
+ ASM_INSTRUCTIONS = {
'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
@@ -296,7 +296,7 @@ class DelphiLexer(Lexer):
'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
'xlatb', 'xor'
- }
+ }
def __init__(self, **options):
Lexer.__init__(self, **options)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/pawn.py b/contrib/python/Pygments/py2/pygments/lexers/pawn.py
index 3cdfbd03e8..6123d40e21 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/pawn.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/pawn.py
@@ -86,25 +86,25 @@ class SourcePawnLexer(RegexLexer):
]
}
- SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
- 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
- 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
- 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
- 'ConVarBounds', 'QueryCookie', 'ReplySource',
- 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
- 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
- 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
- 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
- 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
- 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
- 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
- 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
- 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
- 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
- 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
- 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
- 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
- 'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
+ SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
+ 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
+ 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
+ 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
+ 'ConVarBounds', 'QueryCookie', 'ReplySource',
+ 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
+ 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
+ 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
+ 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
+ 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
+ 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
+ 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
+ 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
+ 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
+ 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
+ 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
+ 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
+ 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
+ 'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
def __init__(self, **options):
self.smhighlighting = get_bool_opt(options,
diff --git a/contrib/python/Pygments/py2/pygments/lexers/praat.py b/contrib/python/Pygments/py2/pygments/lexers/praat.py
index 4a6a14f0ea..48c4ef10e0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/praat.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/praat.py
@@ -55,7 +55,7 @@ class PraatLexer(RegexLexer):
'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
- 'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
+ 'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
@@ -63,9 +63,9 @@ class PraatLexer(RegexLexer):
'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
- 'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
+ 'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
- 'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
+ 'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
'writeInfo', 'writeInfoLine',
)
@@ -90,9 +90,9 @@ class PraatLexer(RegexLexer):
'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
- 'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
- 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
- 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
+ 'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
+ 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
+ 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
@@ -112,10 +112,10 @@ class PraatLexer(RegexLexer):
'defaultDirectory',
)
- object_attributes = (
- 'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
- )
-
+ object_attributes = (
+ 'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
+ )
+
tokens = {
'root': [
(r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
@@ -152,9 +152,9 @@ class PraatLexer(RegexLexer):
],
'command': [
(r'( ?[\w()-]+ ?)', Keyword),
-
- include('string_interpolated'),
-
+
+ include('string_interpolated'),
+
(r'\.{3}', Keyword, ('#pop', 'old_arguments')),
(r':', Keyword, ('#pop', 'comma_list')),
(r'\s', Text, '#pop'),
@@ -213,39 +213,39 @@ class PraatLexer(RegexLexer):
(r'\n', Text, '#pop'),
(r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
],
- 'object_reference': [
- include('string_interpolated'),
- (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
-
- (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
-
- (r'\$', Name.Builtin),
- (r'\[', Text, '#pop'),
+ 'object_reference': [
+ include('string_interpolated'),
+ (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
+
+ (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
+
+ (r'\$', Name.Builtin),
+ (r'\[', Text, '#pop'),
],
'variable_name': [
include('operator'),
include('number'),
(words(variables_string, suffix=r'\$'), Name.Variable.Global),
- (words(variables_numeric,
- suffix=r'(?=[^a-zA-Z0-9\._"\'\$#\[:\(]|\s|^|$)'),
- Name.Variable.Global),
+ (words(variables_numeric,
+ suffix=r'(?=[^a-zA-Z0-9\._"\'\$#\[:\(]|\s|^|$)'),
+ Name.Variable.Global),
- (words(objects, prefix=r'\b', suffix=r"(_)"),
- bygroups(Name.Builtin, Name.Builtin),
- 'object_reference'),
+ (words(objects, prefix=r'\b', suffix=r"(_)"),
+ bygroups(Name.Builtin, Name.Builtin),
+ 'object_reference'),
(r'\.?_?[a-z][\w.]*(\$|#)?', Text),
(r'[\[\]]', Punctuation, 'comma_list'),
-
- include('string_interpolated'),
+
+ include('string_interpolated'),
],
'operator': [
(r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
],
'string_interpolated': [
- (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w\d,]+")\])?(:[0-9]+)?\'',
+ (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w\d,]+")\])?(:[0-9]+)?\'',
String.Interpol),
],
'string_unquoted': [
@@ -253,9 +253,9 @@ class PraatLexer(RegexLexer):
(r'\n', Text, '#pop'),
(r'\s', Text),
-
- include('string_interpolated'),
-
+
+ include('string_interpolated'),
+
(r"'", String),
(r"[^'\n]+", String),
],
@@ -263,14 +263,14 @@ class PraatLexer(RegexLexer):
(r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'"', String, '#pop'),
-
- include('string_interpolated'),
-
+
+ include('string_interpolated'),
+
(r"'", String),
(r'[^\'"\n]+', String),
],
'old_form': [
- (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
+ (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
(r'\s+', Text),
(r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
diff --git a/contrib/python/Pygments/py2/pygments/lexers/prolog.py b/contrib/python/Pygments/py2/pygments/lexers/prolog.py
index 70783625e0..4682ca4bb4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/prolog.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/prolog.py
@@ -107,19 +107,19 @@ class LogtalkLexer(RegexLexer):
(r'\n', Text),
(r'\s+', Text),
# Numbers
- (r"0'[\\]?.", Number),
+ (r"0'[\\]?.", Number),
(r'0b[01]+', Number.Bin),
(r'0o[0-7]+', Number.Oct),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
# Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
# Event handlers
(r'(after|before)(?=[(])', Keyword),
# Message forwarding handler
(r'forward(?=[(])', Keyword),
# Execution-context methods
- (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
+ (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
# Reflection
(r'(current_predicate|predicate_property)(?=[(])', Keyword),
# DCGs and term expansion
@@ -135,23 +135,23 @@ class LogtalkLexer(RegexLexer):
# Events
(r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
# Flags
- (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
+ (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
# Compiling, loading, and library paths
- (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
+ (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
(r'\blogtalk_make\b', Keyword),
# Database
(r'(clause|retract(all)?)(?=[(])', Keyword),
(r'a(bolish|ssert(a|z))(?=[(])', Keyword),
# Control constructs
(r'(ca(ll|tch)|throw)(?=[(])', Keyword),
- (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
- (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
+ (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
+ (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
# All solutions
(r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
- # Multi-threading predicates
- (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
- # Engine predicates
- (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
+ # Multi-threading predicates
+ (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Engine predicates
+ (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
# Term unification
(r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
# Term creation and decomposition
@@ -163,7 +163,7 @@ class LogtalkLexer(RegexLexer):
# Other arithmetic functors
(r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
# Term testing
- (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
+ (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
# Term comparison
(r'compare(?=[(])', Keyword),
# Stream selection and control
@@ -228,10 +228,10 @@ class LogtalkLexer(RegexLexer):
(r'\^', Operator),
# Strings
(r'"(\\\\|\\"|[^"])*"', String),
- # Punctuation
+ # Punctuation
(r'[()\[\],.|]', Text),
# Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[a-z][a-zA-Z0-9_]*", Text),
(r"'", String, 'quoted_atom'),
],
@@ -246,35 +246,35 @@ class LogtalkLexer(RegexLexer):
'directive': [
# Conditional compilation directives
(r'(el)?if(?=[(])', Keyword, 'root'),
- (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
+ (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
# Entity directives
(r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
- (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
+ (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
# Predicate scope directives
(r'(public|protected|private)(?=[(])', Keyword, 'root'),
# Other directives
(r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
(r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
- (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
- (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+ (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
+ (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
(r'op(?=[(])', Keyword, 'root'),
(r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
],
'entityrelations': [
(r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
# Numbers
- (r"0'[\\]?.", Number),
+ (r"0'[\\]?.", Number),
(r'0b[01]+', Number.Bin),
(r'0o[0-7]+', Number.Oct),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
# Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
# Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[a-z][a-zA-Z0-9_]*", Text),
(r"'", String, 'quoted_atom'),
# Strings
(r'"(\\\\|\\"|[^"])*"', String),
@@ -282,7 +282,7 @@ class LogtalkLexer(RegexLexer):
(r'([)]\.)', Text, 'root'),
# Scope operator
(r'(::)', Operator),
- # Punctuation
+ # Punctuation
(r'[()\[\],.|]', Text),
# Comments
(r'%.*?\n', Comment),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/python.py b/contrib/python/Pygments/py2/pygments/lexers/python.py
index 5f700e7f5d..ece2e75962 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/python.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/python.py
@@ -19,291 +19,291 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
from pygments import unistring as uni
__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
- 'Python2Lexer', 'Python2TracebackLexer',
- 'CythonLexer', 'DgLexer', 'NumPyLexer']
+ 'Python2Lexer', 'Python2TracebackLexer',
+ 'CythonLexer', 'DgLexer', 'NumPyLexer']
line_re = re.compile('.*?\n')
class PythonLexer(RegexLexer):
"""
- For `Python <http://www.python.org>`_ source code (version 3.x).
-
- .. versionadded:: 0.10
-
- .. versionchanged:: 2.5
- This is now the default ``PythonLexer``. It is still available as the
- alias ``Python3Lexer``.
+ For `Python <http://www.python.org>`_ source code (version 3.x).
+
+ .. versionadded:: 0.10
+
+ .. versionchanged:: 2.5
+ This is now the default ``PythonLexer``. It is still available as the
+ alias ``Python3Lexer``.
"""
name = 'Python'
- aliases = ['python', 'py', 'sage', 'python3', 'py3']
- filenames = [
- '*.py',
- '*.pyw',
- # Jython
- '*.jy',
- # Sage
- '*.sage',
- # SCons
- '*.sc',
- 'SConstruct',
- 'SConscript',
- # Skylark/Starlark (used by Bazel, Buck, and Pants)
- '*.bzl',
- 'BUCK',
- 'BUILD',
- 'BUILD.bazel',
- 'WORKSPACE',
- # Twisted Application infrastructure
- '*.tac',
- ]
- mimetypes = ['text/x-python', 'application/x-python',
- 'text/x-python3', 'application/x-python3']
-
- flags = re.MULTILINE | re.UNICODE
-
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
- def innerstring_rules(ttype):
- return [
- # the old style '%s' % (...) string formatting (still valid in Py3)
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
- # the new style '{}'.format(...) string formatting
- (r'\{'
- r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
- r'(\![sra])?' # conversion
- r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
- r'\}', String.Interpol),
-
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%{\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%|(\{{1,2})', ttype)
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Text, String.Affix, String.Doc)),
- (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Text, String.Affix, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'\A#!.+$', Comment.Hashbang),
- (r'#.*$', Comment.Single),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('builtins'),
- include('magicfuncs'),
- include('magicvars'),
- # raw strings
- ('(?i)(rb|br|fr|rf|r)(""")',
- bygroups(String.Affix, String.Double), 'tdqs'),
- ("(?i)(rb|br|fr|rf|r)(''')",
- bygroups(String.Affix, String.Single), 'tsqs'),
- ('(?i)(rb|br|fr|rf|r)(")',
- bygroups(String.Affix, String.Double), 'dqs'),
- ("(?i)(rb|br|fr|rf|r)(')",
- bygroups(String.Affix, String.Single), 'sqs'),
- # non-raw strings
- ('([uUbBfF]?)(""")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'tdqs')),
- ("([uUbBfF]?)(''')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'tsqs')),
- ('([uUbBfF]?)(")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'dqs')),
- ("([uUbBfF]?)(')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
- 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
- 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
- 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
- 'yield from', 'as', 'with'), suffix=r'\b'),
- Keyword),
- (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray',
- 'bytes', 'chr', 'classmethod', 'cmp', 'compile', 'complex',
- 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter',
- 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr',
- 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass',
- 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview',
- 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',
- 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',
- 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',
- 'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
- 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
- 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
- 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
- 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
- 'NotImplementedError', 'OSError', 'OverflowError',
- 'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
- 'RuntimeError', 'RuntimeWarning', 'StopIteration',
- 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
- 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
- 'Warning', 'WindowsError', 'ZeroDivisionError',
- # new builtin exceptions from PEP 3151
- 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
- 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
- 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
- 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
- 'PermissionError', 'ProcessLookupError', 'TimeoutError',
- # others new in Python 3
- 'StopAsyncIteration'),
- prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'magicfuncs': [
- (words((
- '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
- '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
- '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
- '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
- '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
- '__ge__', '__get__', '__getattr__', '__getattribute__',
- '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
- '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
- '__imul__', '__index__', '__init__', '__instancecheck__',
- '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
- '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
- '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
- '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
- '__new__', '__next__', '__or__', '__pos__', '__pow__',
- '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
- '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
- '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
- '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
- '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
- '__sub__', '__subclasscheck__', '__truediv__',
- '__xor__'), suffix=r'\b'),
- Name.Function.Magic),
- ],
- 'magicvars': [
- (words((
- '__annotations__', '__bases__', '__class__', '__closure__',
- '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
- '__func__', '__globals__', '__kwdefaults__', '__module__',
- '__mro__', '__name__', '__objclass__', '__qualname__',
- '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
- Name.Variable.Magic),
- ],
- 'numbers': [
- (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
- r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
- (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
- (r'0[oO](?:_?[0-7])+', Number.Oct),
- (r'0[bB](?:_?[01])+', Number.Bin),
- (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
- (r'\d(?:_?\d)*', Number.Integer),
- ],
- 'name': [
- (r'@' + uni_name, Name.Decorator),
- (r'@', Operator), # new matrix multiplication operator
- (uni_name, Name),
- ],
- 'funcname': [
- include('magicfuncs'),
- (uni_name, Name.Function, '#pop'),
- default('#pop'),
- ],
- 'classname': [
- (uni_name, Name.Class, '#pop'),
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
- (r'\.', Name.Namespace),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- (uni_name, Name.Namespace),
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?(3(\.\d)?)?')
-
-
-Python3Lexer = PythonLexer
-
-
-class Python2Lexer(RegexLexer):
- """
- For `Python 2.x <http://www.python.org>`_ source code.
-
- .. versionchanged:: 2.5
- This class has been renamed from ``PythonLexer``. ``PythonLexer`` now
- refers to the Python 3 variant. File name patterns like ``*.py`` have
- been moved to Python 3 as well.
- """
-
- name = 'Python 2.x'
- aliases = ['python2', 'py2']
- filenames = [] # now taken over by PythonLexer (3.x)
- mimetypes = ['text/x-python2', 'application/x-python2']
-
+ aliases = ['python', 'py', 'sage', 'python3', 'py3']
+ filenames = [
+ '*.py',
+ '*.pyw',
+ # Jython
+ '*.jy',
+ # Sage
+ '*.sage',
+ # SCons
+ '*.sc',
+ 'SConstruct',
+ 'SConscript',
+ # Skylark/Starlark (used by Bazel, Buck, and Pants)
+ '*.bzl',
+ 'BUCK',
+ 'BUILD',
+ 'BUILD.bazel',
+ 'WORKSPACE',
+ # Twisted Application infrastructure
+ '*.tac',
+ ]
+ mimetypes = ['text/x-python', 'application/x-python',
+ 'text/x-python3', 'application/x-python3']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+
def innerstring_rules(ttype):
return [
+ # the old style '%s' % (...) string formatting (still valid in Py3)
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
+ # the new style '{}'.format(...) string formatting
+ (r'\{'
+ r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+ r'(\![sra])?' # conversion
+ r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+ r'\}', String.Interpol),
+
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%{\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r'%|(\{{1,2})', ttype)
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+ bygroups(Text, String.Affix, String.Doc)),
+ (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+ bygroups(Text, String.Affix, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'\A#!.+$', Comment.Hashbang),
+ (r'#.*$', Comment.Single),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+ include('keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
+ include('builtins'),
+ include('magicfuncs'),
+ include('magicvars'),
+ # raw strings
+ ('(?i)(rb|br|fr|rf|r)(""")',
+ bygroups(String.Affix, String.Double), 'tdqs'),
+ ("(?i)(rb|br|fr|rf|r)(''')",
+ bygroups(String.Affix, String.Single), 'tsqs'),
+ ('(?i)(rb|br|fr|rf|r)(")',
+ bygroups(String.Affix, String.Double), 'dqs'),
+ ("(?i)(rb|br|fr|rf|r)(')",
+ bygroups(String.Affix, String.Single), 'sqs'),
+ # non-raw strings
+ ('([uUbBfF]?)(""")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'tdqs')),
+ ("([uUbBfF]?)(''')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'tsqs')),
+ ('([uUbBfF]?)(")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'dqs')),
+ ("([uUbBfF]?)(')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+ 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
+ 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray',
+ 'bytes', 'chr', 'classmethod', 'cmp', 'compile', 'complex',
+ 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter',
+ 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr',
+ 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass',
+ 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',
+ 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',
+ 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',
+ 'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
+ 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
+ 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
+ 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+ 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+ 'NotImplementedError', 'OSError', 'OverflowError',
+ 'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
+ 'RuntimeError', 'RuntimeWarning', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
+ 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
+ 'Warning', 'WindowsError', 'ZeroDivisionError',
+ # new builtin exceptions from PEP 3151
+ 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
+ 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
+ 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
+ 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
+ 'PermissionError', 'ProcessLookupError', 'TimeoutError',
+ # others new in Python 3
+ 'StopAsyncIteration'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'magicfuncs': [
+ (words((
+ '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
+ '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
+ '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
+ '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
+ '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
+ '__ge__', '__get__', '__getattr__', '__getattribute__',
+ '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
+ '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
+ '__imul__', '__index__', '__init__', '__instancecheck__',
+ '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
+ '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
+ '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
+ '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
+ '__new__', '__next__', '__or__', '__pos__', '__pow__',
+ '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
+ '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
+ '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
+ '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
+ '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
+ '__sub__', '__subclasscheck__', '__truediv__',
+ '__xor__'), suffix=r'\b'),
+ Name.Function.Magic),
+ ],
+ 'magicvars': [
+ (words((
+ '__annotations__', '__bases__', '__class__', '__closure__',
+ '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
+ '__func__', '__globals__', '__kwdefaults__', '__module__',
+ '__mro__', '__name__', '__objclass__', '__qualname__',
+ '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
+ Name.Variable.Magic),
+ ],
+ 'numbers': [
+ (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
+ r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
+ (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
+ (r'0[oO](?:_?[0-7])+', Number.Oct),
+ (r'0[bB](?:_?[01])+', Number.Bin),
+ (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
+ (r'\d(?:_?\d)*', Number.Integer),
+ ],
+ 'name': [
+ (r'@' + uni_name, Name.Decorator),
+ (r'@', Operator), # new matrix multiplication operator
+ (uni_name, Name),
+ ],
+ 'funcname': [
+ include('magicfuncs'),
+ (uni_name, Name.Function, '#pop'),
+ default('#pop'),
+ ],
+ 'classname': [
+ (uni_name, Name.Class, '#pop'),
+ ],
+ 'import': [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\.', Name.Namespace),
+ (uni_name, Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
+ (r'\.', Name.Namespace),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r'None\b', Name.Builtin.Pseudo, '#pop'),
+ (uni_name, Name.Namespace),
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings-single': innerstring_rules(String.Single),
+ 'strings-double': innerstring_rules(String.Double),
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings-double')
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings-single')
+ ],
+ 'tdqs': [
+ (r'"""', String.Double, '#pop'),
+ include('strings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqs': [
+ (r"'''", String.Single, '#pop'),
+ include('strings-single'),
+ (r'\n', String.Single)
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'pythonw?(3(\.\d)?)?')
+
+
+Python3Lexer = PythonLexer
+
+
+class Python2Lexer(RegexLexer):
+ """
+ For `Python 2.x <http://www.python.org>`_ source code.
+
+ .. versionchanged:: 2.5
+ This class has been renamed from ``PythonLexer``. ``PythonLexer`` now
+ refers to the Python 3 variant. File name patterns like ``*.py`` have
+ been moved to Python 3 as well.
+ """
+
+ name = 'Python 2.x'
+ aliases = ['python2', 'py2']
+ filenames = [] # now taken over by PythonLexer (3.x)
+ mimetypes = ['text/x-python2', 'application/x-python2']
+
+ def innerstring_rules(ttype):
+ return [
# the old style '%s' % (...) string formatting
(r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
@@ -391,15 +391,15 @@ class Python2Lexer(RegexLexer):
'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'ModuleNotFoundError', 'NameError',
- 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
- 'PendingDeprecationWarning', 'RecursionError', 'ReferenceError',
- 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
- 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
- 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
- 'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ 'MemoryError', 'ModuleNotFoundError', 'NameError',
+ 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
+ 'PendingDeprecationWarning', 'RecursionError', 'ReferenceError',
+ 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
+ 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
+ 'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
Name.Exception),
],
'magicfuncs': [
@@ -505,7 +505,7 @@ class Python2Lexer(RegexLexer):
}
def analyse_text(text):
- return shebang_matches(text, r'pythonw?2(\.\d)?') or \
+ return shebang_matches(text, r'pythonw?2(\.\d)?') or \
'import ' in text[:1000]
@@ -526,27 +526,27 @@ class PythonConsoleLexer(Lexer):
Additional options:
`python3`
- Use Python 3 lexer for code. Default is ``True``.
+ Use Python 3 lexer for code. Default is ``True``.
.. versionadded:: 1.0
- .. versionchanged:: 2.5
- Now defaults to ``True``.
+ .. versionchanged:: 2.5
+ Now defaults to ``True``.
"""
name = 'Python console session'
aliases = ['pycon']
mimetypes = ['text/x-python-doctest']
def __init__(self, **options):
- self.python3 = get_bool_opt(options, 'python3', True)
+ self.python3 = get_bool_opt(options, 'python3', True)
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
if self.python3:
pylexer = PythonLexer(**self.options)
tblexer = PythonTracebackLexer(**self.options)
- else:
- pylexer = Python2Lexer(**self.options)
- tblexer = Python2TracebackLexer(**self.options)
+ else:
+ pylexer = Python2Lexer(**self.options)
+ tblexer = Python2TracebackLexer(**self.options)
curcode = ''
insertions = []
@@ -601,28 +601,28 @@ class PythonConsoleLexer(Lexer):
class PythonTracebackLexer(RegexLexer):
"""
- For Python 3.x tracebacks, with support for chained exceptions.
-
- .. versionadded:: 1.0
+ For Python 3.x tracebacks, with support for chained exceptions.
- .. versionchanged:: 2.5
- This is now the default ``PythonTracebackLexer``. It is still available
- as the alias ``Python3TracebackLexer``.
+ .. versionadded:: 1.0
+
+ .. versionchanged:: 2.5
+ This is now the default ``PythonTracebackLexer``. It is still available
+ as the alias ``Python3TracebackLexer``.
"""
name = 'Python Traceback'
- aliases = ['pytb', 'py3tb']
- filenames = ['*.pytb', '*.py3tb']
- mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
+ aliases = ['pytb', 'py3tb']
+ filenames = ['*.pytb', '*.py3tb']
+ mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
tokens = {
'root': [
- (r'\n', Text),
- (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
- (r'^During handling of the above exception, another '
- r'exception occurred:\n\n', Generic.Traceback),
- (r'^The above exception was the direct cause of the '
- r'following exception:\n\n', Generic.Traceback),
+ (r'\n', Text),
+ (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+ (r'^During handling of the above exception, another '
+ r'exception occurred:\n\n', Generic.Traceback),
+ (r'^The above exception was the direct cause of the '
+ r'following exception:\n\n', Generic.Traceback),
(r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
(r'^.*\n', Other),
],
@@ -643,34 +643,34 @@ class PythonTracebackLexer(RegexLexer):
}
-Python3TracebackLexer = PythonTracebackLexer
-
-
-class Python2TracebackLexer(RegexLexer):
+Python3TracebackLexer = PythonTracebackLexer
+
+
+class Python2TracebackLexer(RegexLexer):
"""
- For Python tracebacks.
-
- .. versionadded:: 0.7
+ For Python tracebacks.
- .. versionchanged:: 2.5
- This class has been renamed from ``PythonTracebackLexer``.
- ``PythonTracebackLexer`` now refers to the Python 3 variant.
+ .. versionadded:: 0.7
+
+ .. versionchanged:: 2.5
+ This class has been renamed from ``PythonTracebackLexer``.
+ ``PythonTracebackLexer`` now refers to the Python 3 variant.
"""
- name = 'Python 2.x Traceback'
- aliases = ['py2tb']
- filenames = ['*.py2tb']
- mimetypes = ['text/x-python2-traceback']
+ name = 'Python 2.x Traceback'
+ aliases = ['py2tb']
+ filenames = ['*.py2tb']
+ mimetypes = ['text/x-python2-traceback']
tokens = {
'root': [
- # Cover both (most recent call last) and (innermost last)
- # The optional ^C allows us to catch keyboard interrupt signals.
- (r'^(\^C)?(Traceback.*\n)',
- bygroups(Text, Generic.Traceback), 'intb'),
- # SyntaxError starts with this.
+ # Cover both (most recent call last) and (innermost last)
+ # The optional ^C allows us to catch keyboard interrupt signals.
+ (r'^(\^C)?(Traceback.*\n)',
+ bygroups(Text, Generic.Traceback), 'intb'),
+ # SyntaxError starts with this.
(r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
+ (r'^.*\n', Other),
],
'intb': [
(r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
@@ -678,7 +678,7 @@ class Python2TracebackLexer(RegexLexer):
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
bygroups(Text, Name.Builtin, Text, Number, Text)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(Python2Lexer), Text)),
+ bygroups(Text, using(Python2Lexer), Text)),
(r'^([ \t]*)(\.\.\.)(\n)',
bygroups(Text, Comment, Text)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
@@ -981,7 +981,7 @@ class NumPyLexer(PythonLexer):
mimetypes = []
filenames = []
- EXTRA_KEYWORDS = {
+ EXTRA_KEYWORDS = {
'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
@@ -1046,7 +1046,7 @@ class NumPyLexer(PythonLexer):
'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
- }
+ }
def get_tokens_unprocessed(self, text):
for index, token, value in \
@@ -1057,6 +1057,6 @@ class NumPyLexer(PythonLexer):
yield index, token, value
def analyse_text(text):
- return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
+ return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
'import ' in text[:1000]) \
and ('import numpy' in text or 'from numpy import' in text)
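The python.py hunks above re-add the Pygments 2.5 rename unchanged: PythonLexer now targets Python 3 (keeping "python3"/"py3" as aliases), the old lexer survives as Python2Lexer under "python2"/"py2", and PythonConsoleLexer defaults to python3=True. A short sketch of how those aliases resolve through the standard get_lexer_by_name helper, based only on the name/aliases attributes shown in the diff:

from pygments.lexers import get_lexer_by_name

# After the 2.5 rename, old and new aliases resolve as follows.
for alias in ("python", "py3", "python3", "python2", "py2", "pycon"):
    print("%-8s -> %s" % (alias, get_lexer_by_name(alias).name))
# Expected (per the aliases/name attributes above):
#   python   -> Python
#   py3      -> Python
#   python3  -> Python
#   python2  -> Python 2.x
#   py2      -> Python 2.x
#   pycon    -> Python console session

# The console lexer now highlights code as Python 3 by default;
# pass python3=False to restore the 2.x behaviour.
console2 = get_lexer_by_name("pycon", python3=False)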
diff --git a/contrib/python/Pygments/py2/pygments/lexers/rdf.py b/contrib/python/Pygments/py2/pygments/lexers/rdf.py
index 5927a686d4..5240a1bd97 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/rdf.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/rdf.py
@@ -15,7 +15,7 @@ from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
Whitespace, Name, Literal, Comment, Text
-__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
+__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
class SparqlLexer(RegexLexer):
@@ -275,149 +275,149 @@ class TurtleLexer(RegexLexer):
for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
if re.search(r'^\s*%s' % t, text):
return 0.80
-
-
-class ShExCLexer(RegexLexer):
- """
- Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
- """
- name = 'ShExC'
- aliases = ['shexc', 'shex']
- filenames = ['*.shex']
- mimetypes = ['text/shex']
-
- # character group definitions ::
-
- PN_CHARS_BASE_GRP = (u'a-zA-Z'
- u'\u00c0-\u00d6'
- u'\u00d8-\u00f6'
- u'\u00f8-\u02ff'
- u'\u0370-\u037d'
- u'\u037f-\u1fff'
- u'\u200c-\u200d'
- u'\u2070-\u218f'
- u'\u2c00-\u2fef'
- u'\u3001-\ud7ff'
- u'\uf900-\ufdcf'
- u'\ufdf0-\ufffd')
-
- PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
- PN_CHARS_GRP = (PN_CHARS_U_GRP +
- r'\-' +
- r'0-9' +
- u'\u00b7' +
- u'\u0300-\u036f' +
- u'\u203f-\u2040')
-
- HEX_GRP = '0-9A-Fa-f'
-
- PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
-
- # terminal productions ::
-
- PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
- PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
- PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
- HEX = '[' + HEX_GRP + ']'
-
- PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
-
- UCHAR = r'\\' + UCHAR_NO_BACKSLASH
-
- IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
-
- BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
- '.]*' + PN_CHARS + ')?'
-
- PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
- PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
- '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
- PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- EXPONENT = r'[eE][+-]?\d+'
-
- # Lexer token definitions ::
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # keywords ::
- (r'(?i)(base|prefix|start|external|'
- r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
- r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
- r'totaldigits|fractiondigits|'
- r'closed|extra)\b', Keyword),
- (r'(a)\b', Keyword),
- # IRIs ::
- ('(' + IRIREF + ')', Name.Label),
- # blank nodes ::
- ('(' + BLANK_NODE_LABEL + ')', Name.Label),
- # prefixed names ::
- (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
- # boolean literals ::
- (r'(true|false)', Keyword.Constant),
- # double literals ::
- (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
- # decimal literals ::
- (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
- # integer literals ::
- (r'[+\-]?\d+', Number.Integer),
- # operators ::
- (r'[@|$&=*+?^\-~]', Operator),
- # operator keywords ::
- (r'(?i)(and|or|not)\b', Operator.Word),
- # punctuation characters ::
- (r'[(){}.;,:^\[\]]', Punctuation),
- # line comments ::
- (r'#[^\n]*', Comment),
- # strings ::
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String.Escape, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
- (r'.', String.Escape, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Name.Function), '#pop:2'),
- (r'\^\^', Operator, '#pop:2'),
- default('#pop:2'),
- ],
- }
+
+
+class ShExCLexer(RegexLexer):
+ """
+ Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
+ """
+ name = 'ShExC'
+ aliases = ['shexc', 'shex']
+ filenames = ['*.shex']
+ mimetypes = ['text/shex']
+
+ # character group definitions ::
+
+ PN_CHARS_BASE_GRP = (u'a-zA-Z'
+ u'\u00c0-\u00d6'
+ u'\u00d8-\u00f6'
+ u'\u00f8-\u02ff'
+ u'\u0370-\u037d'
+ u'\u037f-\u1fff'
+ u'\u200c-\u200d'
+ u'\u2070-\u218f'
+ u'\u2c00-\u2fef'
+ u'\u3001-\ud7ff'
+ u'\uf900-\ufdcf'
+ u'\ufdf0-\ufffd')
+
+ PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+ PN_CHARS_GRP = (PN_CHARS_U_GRP +
+ r'\-' +
+ r'0-9' +
+ u'\u00b7' +
+ u'\u0300-\u036f' +
+ u'\u203f-\u2040')
+
+ HEX_GRP = '0-9A-Fa-f'
+
+ PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
+
+ # terminal productions ::
+
+ PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+ PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
+
+ PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+ HEX = '[' + HEX_GRP + ']'
+
+ PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+ UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
+
+ UCHAR = r'\\' + UCHAR_NO_BACKSLASH
+
+ IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
+
+ BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
+ '.]*' + PN_CHARS + ')?'
+
+ PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+ PERCENT = '%' + HEX + HEX
+
+ PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+ PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+ PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+ '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+ PN_CHARS_GRP + ':]|' + PLX + '))?')
+
+ EXPONENT = r'[eE][+-]?\d+'
+
+ # Lexer token definitions ::
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # keywords ::
+ (r'(?i)(base|prefix|start|external|'
+ r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
+ r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
+ r'totaldigits|fractiondigits|'
+ r'closed|extra)\b', Keyword),
+ (r'(a)\b', Keyword),
+ # IRIs ::
+ ('(' + IRIREF + ')', Name.Label),
+ # blank nodes ::
+ ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+ # prefixed names ::
+ (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
+ bygroups(Name.Namespace, Punctuation, Name.Tag)),
+ # boolean literals ::
+ (r'(true|false)', Keyword.Constant),
+ # double literals ::
+ (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
+ # decimal literals ::
+ (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+ # integer literals ::
+ (r'[+\-]?\d+', Number.Integer),
+ # operators ::
+ (r'[@|$&=*+?^\-~]', Operator),
+ # operator keywords ::
+ (r'(?i)(and|or|not)\b', Operator.Word),
+ # punctuation characters ::
+ (r'[(){}.;,:^\[\]]', Punctuation),
+ # line comments ::
+ (r'#[^\n]*', Comment),
+ # strings ::
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String.Escape, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'end-of-string': [
+ (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+ bygroups(Operator, Name.Function), '#pop:2'),
+ (r'\^\^', Operator, '#pop:2'),
+ default('#pop:2'),
+ ],
+ }
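
The ShExCLexer restored above is a plain RegexLexer, so it can be driven through the normal Pygments entry points. A hedged sketch, assuming the `shexc` alias is present in `lexers/_mapping.py` of this vendored copy; the schema text is only illustrative ShExC.

# Usage sketch for the ShExCLexer above (assumes the 'shexc' alias is
# registered in lexers/_mapping.py; the schema is illustrative ShExC).
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

shex_schema = u'''\
PREFIX ex: <http://example.org/>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>

ex:UserShape {
  ex:name xsd:string ;
  ex:age xsd:integer?
}
'''

print(highlight(shex_schema, get_lexer_by_name('shexc'), TerminalFormatter()))
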
diff --git a/contrib/python/Pygments/py2/pygments/lexers/resource.py b/contrib/python/Pygments/py2/pygments/lexers/resource.py
index ccd4e5f6cc..ce7cd4fee7 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/resource.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/resource.py
@@ -26,7 +26,7 @@ class ResourceLexer(RegexLexer):
"""
name = 'ResourceBundle'
aliases = ['resource', 'resourcebundle']
- filenames = []
+ filenames = []
_types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
':int', ':alias')
diff --git a/contrib/python/Pygments/py2/pygments/lexers/robotframework.py b/contrib/python/Pygments/py2/pygments/lexers/robotframework.py
index 642c90c5c1..3189d9b6fc 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/robotframework.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/robotframework.py
@@ -64,7 +64,7 @@ class RobotFrameworkLexer(Lexer):
"""
name = 'RobotFramework'
aliases = ['robotframework']
- filenames = ['*.robot']
+ filenames = ['*.robot']
mimetypes = ['text/x-robotframework']
def __init__(self, **options):
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ruby.py b/contrib/python/Pygments/py2/pygments/lexers/ruby.py
index 8bcbde6714..bf6a56ecc4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ruby.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ruby.py
@@ -43,17 +43,17 @@ class RubyLexer(ExtendedRegexLexer):
def heredoc_callback(self, match, ctx):
# okay, this is the hardest part of parsing Ruby...
- # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+ # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
start = match.start(1)
- yield start, Operator, match.group(1) # <<[-~]?
+ yield start, Operator, match.group(1) # <<[-~]?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
outermost = not bool(heredocstack)
- heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
+ heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
ctx.pos = match.start(5)
ctx.end = match.end(5)
@@ -247,10 +247,10 @@ class RubyLexer(ExtendedRegexLexer):
Name.Builtin),
(r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
# normal heredocs
- (r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
+ (r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
heredoc_callback),
# empty string heredocs
- (r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
+ (r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
(r'__END__', Comment.Preproc, 'end-part'),
# multiline regex (after keywords or assignments)
(r'(?:^|(?<=[=<>~!:])|'
diff --git a/contrib/python/Pygments/py2/pygments/lexers/rust.py b/contrib/python/Pygments/py2/pygments/lexers/rust.py
index f731785fe6..118d7f3a92 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/rust.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/rust.py
@@ -59,7 +59,7 @@ class RustLexer(RegexLexer):
tokens = {
'root': [
# rust allows a file to start with a shebang, but if the first line
- # starts with #![ then it's not a shebang but a crate attribute.
+ # starts with #![ then it's not a shebang but a crate attribute.
(r'#![^[\r\n].*$', Comment.Preproc),
default('base'),
],
@@ -78,10 +78,10 @@ class RustLexer(RegexLexer):
(r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
# Keywords
(words((
- 'as', 'async', 'await', 'box', 'const', 'crate', 'else',
- 'extern', 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
- 'mut', 'pub', 'ref', 'return', 'static', 'super', 'trait',
- 'try', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
+ 'as', 'async', 'await', 'box', 'const', 'crate', 'else',
+ 'extern', 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
+ 'mut', 'pub', 'ref', 'return', 'static', 'super', 'trait',
+ 'try', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
Keyword),
(words(('abstract', 'alignof', 'become', 'do', 'final', 'macro',
'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof',
@@ -95,7 +95,7 @@ class RustLexer(RegexLexer):
(r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
keyword_types,
(r'self\b', Name.Builtin.Pseudo),
- # Prelude (taken from Rust's src/libstd/prelude.rs)
+ # Prelude (taken from Rust's src/libstd/prelude.rs)
builtin_types,
             # Path separators, so types don't catch them.
(r'::\b', Text),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/scdoc.py b/contrib/python/Pygments/py2/pygments/lexers/scdoc.py
index 4916393fde..40b3ac9147 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/scdoc.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/scdoc.py
@@ -1,70 +1,70 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.scdoc
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for scdoc, a simple man page generator.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, \
- using, this
-from pygments.token import Text, Comment, Keyword, String, \
- Generic
-
-
-__all__ = ['ScdocLexer']
-
-
-class ScdocLexer(RegexLexer):
- """
- `scdoc` is a simple man page generator for POSIX systems written in C99.
- https://git.sr.ht/~sircmpwn/scdoc
-
- .. versionadded:: 2.5
- """
- name = 'scdoc'
- aliases = ['scdoc', 'scd']
- filenames = ['*.scd', '*.scdoc']
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- # comment
- (r'^(;.+\n)', bygroups(Comment)),
-
- # heading with pound prefix
- (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
- (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
- # bulleted lists
- (r'^(\s*)([*-])(\s)(.+\n)',
- bygroups(Text, Keyword, Text, using(this, state='inline'))),
- # numbered lists
- (r'^(\s*)(\.+\.)( .+\n)',
- bygroups(Text, Keyword, using(this, state='inline'))),
- # quote
- (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
- # text block
- (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
-
- include('inline'),
- ],
- 'inline': [
- # escape
- (r'\\.', Text),
- # underlines
- (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
- # bold
- (r'(\s)(\*[^\*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
- # inline code
- (r'`[^`]+`', String.Backtick),
-
- # general text, must come last!
- (r'[^\\\s]+', Text),
- (r'.', Text),
- ],
- }
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.scdoc
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scdoc, a simple man page generator.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, \
+ using, this
+from pygments.token import Text, Comment, Keyword, String, \
+ Generic
+
+
+__all__ = ['ScdocLexer']
+
+
+class ScdocLexer(RegexLexer):
+ """
+ `scdoc` is a simple man page generator for POSIX systems written in C99.
+ https://git.sr.ht/~sircmpwn/scdoc
+
+ .. versionadded:: 2.5
+ """
+ name = 'scdoc'
+ aliases = ['scdoc', 'scd']
+ filenames = ['*.scd', '*.scdoc']
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ # comment
+ (r'^(;.+\n)', bygroups(Comment)),
+
+ # heading with pound prefix
+ (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
+ (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
+ # bulleted lists
+ (r'^(\s*)([*-])(\s)(.+\n)',
+ bygroups(Text, Keyword, Text, using(this, state='inline'))),
+ # numbered lists
+ (r'^(\s*)(\.+\.)( .+\n)',
+ bygroups(Text, Keyword, using(this, state='inline'))),
+ # quote
+ (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
+ # text block
+ (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
+
+ include('inline'),
+ ],
+ 'inline': [
+ # escape
+ (r'\\.', Text),
+ # underlines
+ (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
+ # bold
+ (r'(\s)(\*[^\*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
+ # inline code
+ (r'`[^`]+`', String.Backtick),
+
+ # general text, must come last!
+ (r'[^\\\s]+', Text),
+ (r'.', Text),
+ ],
+ }
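
The ScdocLexer above follows the same pattern; a short, hedged sketch of running it over an illustrative scdoc page (the page text loosely follows scdoc conventions and is not taken from the repository).

# Usage sketch for the ScdocLexer above (assumes the 'scdoc' alias is
# registered; the sample page is illustrative scdoc).
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

page = '''\
example(1)

# NAME

*example* - demonstrate _scdoc_ highlighting

# SYNOPSIS

- first bullet with `inline code`
'''

print(highlight(page, get_lexer_by_name('scdoc'), TerminalFormatter()))
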
diff --git a/contrib/python/Pygments/py2/pygments/lexers/shell.py b/contrib/python/Pygments/py2/pygments/lexers/shell.py
index c12cb3f137..c64f151dd4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/shell.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/shell.py
@@ -154,9 +154,9 @@ class ShellSessionBaseLexer(Lexer):
.. versionadded:: 2.1
"""
-
- _venv = re.compile(r'^(\([^)]*\))(\s*)')
-
+
+ _venv = re.compile(r'^(\([^)]*\))(\s*)')
+
def get_tokens_unprocessed(self, text):
innerlexer = self._innerLexerCls(**self.options)
@@ -170,21 +170,21 @@ class ShellSessionBaseLexer(Lexer):
if backslash_continuation:
curcode += line
backslash_continuation = curcode.endswith('\\\n')
- continue
-
- venv_match = self._venv.match(line)
- if venv_match:
- venv = venv_match.group(1)
- venv_whitespace = venv_match.group(2)
- insertions.append((len(curcode),
- [(0, Generic.Prompt.VirtualEnv, venv)]))
- if venv_whitespace:
- insertions.append((len(curcode),
- [(0, Text, venv_whitespace)]))
- line = line[venv_match.end():]
-
- m = self._ps1rgx.match(line)
- if m:
+ continue
+
+ venv_match = self._venv.match(line)
+ if venv_match:
+ venv = venv_match.group(1)
+ venv_whitespace = venv_match.group(2)
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt.VirtualEnv, venv)]))
+ if venv_whitespace:
+ insertions.append((len(curcode),
+ [(0, Text, venv_whitespace)]))
+ line = line[venv_match.end():]
+
+ m = self._ps1rgx.match(line)
+ if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
@@ -227,9 +227,9 @@ class BashSessionLexer(ShellSessionBaseLexer):
mimetypes = ['application/x-shell-session', 'application/x-sh-session']
_innerLexerCls = BashLexer
- _ps1rgx = re.compile(
+ _ps1rgx = re.compile(
r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
- r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)')
+ r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)')
_ps2 = '>'
@@ -556,7 +556,7 @@ class MSDOSSessionLexer(ShellSessionBaseLexer):
mimetypes = []
_innerLexerCls = BatchLexer
- _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
+ _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
_ps2 = 'More? '
@@ -641,7 +641,7 @@ class TcshSessionLexer(ShellSessionBaseLexer):
mimetypes = []
_innerLexerCls = TcshLexer
- _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
+ _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
_ps2 = '? '
@@ -772,7 +772,7 @@ class PowerShellSessionLexer(ShellSessionBaseLexer):
mimetypes = []
_innerLexerCls = PowerShellLexer
- _ps1rgx = re.compile(r'^(PS [^>]+> )(.*\n?)')
+ _ps1rgx = re.compile(r'^(PS [^>]+> )(.*\n?)')
_ps2 = '>> '
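
The `_venv` regex restored in the ShellSessionBaseLexer hunk above is what lets these session lexers tag a leading virtualenv prompt before the PS1 match runs. A standalone sketch of what it captures; the pattern is copied from the hunk, the sample session line is illustrative.

# Standalone sketch of the virtualenv-prompt handling restored above in
# ShellSessionBaseLexer (regex copied from the hunk; sample line illustrative).
import re

_venv = re.compile(r'^(\([^)]*\))(\s*)')

line = '(venv) $ pip install requests'
m = _venv.match(line)
if m:
    print('prompt prefix:', repr(m.group(1)))      # '(venv)' -> Generic.Prompt.VirtualEnv
    print('whitespace   :', repr(m.group(2)))      # ' '      -> Text
    print('rest of line :', repr(line[m.end():]))  # what _ps1rgx sees next
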
diff --git a/contrib/python/Pygments/py2/pygments/lexers/slash.py b/contrib/python/Pygments/py2/pygments/lexers/slash.py
index 76e5929d38..94ca655e70 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/slash.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/slash.py
@@ -26,7 +26,7 @@ class SlashLanguageLexer(ExtendedRegexLexer):
def right_angle_bracket(lexer, match, ctx):
if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
ctx.stack.pop()
- yield match.start(), String.Interpol, u"}"
+ yield match.start(), String.Interpol, u"}"
ctx.pos = match.end()
pass
diff --git a/contrib/python/Pygments/py2/pygments/lexers/solidity.py b/contrib/python/Pygments/py2/pygments/lexers/solidity.py
index 9966837197..c16daa3f35 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/solidity.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/solidity.py
@@ -1,93 +1,93 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.solidity
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Solidity.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SolidityLexer']
-
-
-class SolidityLexer(RegexLexer):
- """
- For Solidity source code.
-
- .. versionadded:: 2.5
- """
-
- name = 'Solidity'
- aliases = ['solidity']
- filenames = ['*.sol']
- mimetypes = []
-
- flags = re.MULTILINE | re.UNICODE
-
- datatype = (
- r'\b(address|bool|((bytes|hash|int|string|uint)(8|16|24|32|40|48|56|64'
- r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
- r'|216|224|232|240|248|256)?))\b'
- )
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
- (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword, Text.WhiteSpace, Name.Entity)),
- (datatype + r'(\s+)((external|public|internal|private)\s+)?' +
- r'([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, None, None, None, Text.WhiteSpace, Keyword,
- None, Name.Variable)),
- (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, Text.WhiteSpace, Name.Variable)),
- (r'\b(msg|block|tx)\.([A-Za-z_][A-Za-z0-9_]*)\b', Keyword),
- (words((
- 'block', 'break', 'constant', 'constructor', 'continue',
- 'contract', 'do', 'else', 'external', 'false', 'for',
- 'function', 'if', 'import', 'inherited', 'internal', 'is',
- 'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
- 'payable', 'private', 'public', 'require', 'return',
- 'returns', 'struct', 'suicide', 'throw', 'this', 'true',
- 'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (datatype, Keyword.Type),
- include('constants'),
- (r'[a-zA-Z_]\w*', Text),
- (r'[!<=>+*/-]', Operator),
- (r'[.;:{}(),\[\]]', Punctuation)
- ],
- 'comments': [
- (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
- (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
- ],
- 'constants': [
- (r'("([\\]"|.)*?")', String.Double),
- (r"('([\\]'|.)*?')", String.Single),
- (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
- (r'\b\d+\b', Number.Decimal),
- ],
- 'pragma': [
- include('whitespace'),
- include('comments'),
- (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
- bygroups(Operator, Text.WhiteSpace, Keyword)),
- (r';', Punctuation, '#pop')
- ],
- 'whitespace': [
- (r'\s+', Text.WhiteSpace),
- (r'\n', Text.WhiteSpace)
- ]
- }
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.solidity
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Solidity.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SolidityLexer']
+
+
+class SolidityLexer(RegexLexer):
+ """
+ For Solidity source code.
+
+ .. versionadded:: 2.5
+ """
+
+ name = 'Solidity'
+ aliases = ['solidity']
+ filenames = ['*.sol']
+ mimetypes = []
+
+ flags = re.MULTILINE | re.UNICODE
+
+ datatype = (
+ r'\b(address|bool|((bytes|hash|int|string|uint)(8|16|24|32|40|48|56|64'
+ r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
+ r'|216|224|232|240|248|256)?))\b'
+ )
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
+ (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword, Text.WhiteSpace, Name.Entity)),
+ (datatype + r'(\s+)((external|public|internal|private)\s+)?' +
+ r'([a-zA-Z_]\w*)',
+ bygroups(Keyword.Type, None, None, None, Text.WhiteSpace, Keyword,
+ None, Name.Variable)),
+ (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Type, Text.WhiteSpace, Name.Variable)),
+ (r'\b(msg|block|tx)\.([A-Za-z_][A-Za-z0-9_]*)\b', Keyword),
+ (words((
+ 'block', 'break', 'constant', 'constructor', 'continue',
+ 'contract', 'do', 'else', 'external', 'false', 'for',
+ 'function', 'if', 'import', 'inherited', 'internal', 'is',
+ 'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
+ 'payable', 'private', 'public', 'require', 'return',
+ 'returns', 'struct', 'suicide', 'throw', 'this', 'true',
+ 'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Type),
+ (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ (datatype, Keyword.Type),
+ include('constants'),
+ (r'[a-zA-Z_]\w*', Text),
+ (r'[!<=>+*/-]', Operator),
+ (r'[.;:{}(),\[\]]', Punctuation)
+ ],
+ 'comments': [
+ (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+ (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
+ ],
+ 'constants': [
+ (r'("([\\]"|.)*?")', String.Double),
+ (r"('([\\]'|.)*?')", String.Single),
+ (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
+ (r'\b\d+\b', Number.Decimal),
+ ],
+ 'pragma': [
+ include('whitespace'),
+ include('comments'),
+ (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
+ bygroups(Operator, Text.WhiteSpace, Keyword)),
+ (r';', Punctuation, '#pop')
+ ],
+ 'whitespace': [
+ (r'\s+', Text.WhiteSpace),
+ (r'\n', Text.WhiteSpace)
+ ]
+ }
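
The SolidityLexer above can be exercised the same way; a hedged sketch, assuming the `solidity` alias is registered in `lexers/_mapping.py`, with an illustrative contract.

# Usage sketch for the SolidityLexer above (assumes the 'solidity' alias is
# registered in lexers/_mapping.py; the contract source is illustrative).
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

source = '''\
pragma solidity ^0.5.0;

contract Counter {
    uint256 count;

    function increment() public {
        count = count + 1;
    }
}
'''

print(highlight(source, get_lexer_by_name('solidity'), TerminalFormatter()))
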
diff --git a/contrib/python/Pygments/py2/pygments/lexers/special.py b/contrib/python/Pygments/py2/pygments/lexers/special.py
index 4016c5949b..6ee1b59346 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/special.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/special.py
@@ -35,7 +35,7 @@ class TextLexer(Lexer):
def analyse_text(text):
return TextLexer.priority
-
+
_ttype_cache = {}
line_re = re.compile(b'.*?\n')
diff --git a/contrib/python/Pygments/py2/pygments/lexers/sql.py b/contrib/python/Pygments/py2/pygments/lexers/sql.py
index afcaa6d4f7..a975e3f2ed 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/sql.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/sql.py
@@ -41,8 +41,8 @@
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
-from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
- Keyword, Name, String, Number, Generic
+from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
+ Keyword, Name, String, Number, Generic
from pygments.lexers import get_lexer_by_name, ClassNotFound
from pygments.util import iteritems
@@ -75,28 +75,28 @@ def language_callback(lexer, match):
The lexer is chosen looking for a nearby LANGUAGE or assumed as
plpgsql if inside a DO statement and no LANGUAGE has been found.
"""
- lx = None
+ lx = None
m = language_re.match(lexer.text[match.end():match.end()+100])
if m is not None:
- lx = lexer._get_lexer(m.group(1))
+ lx = lexer._get_lexer(m.group(1))
else:
m = list(language_re.finditer(
lexer.text[max(0, match.start()-100):match.start()]))
if m:
- lx = lexer._get_lexer(m[-1].group(1))
+ lx = lexer._get_lexer(m[-1].group(1))
else:
m = list(do_re.finditer(
lexer.text[max(0, match.start()-25):match.start()]))
if m:
- lx = lexer._get_lexer('plpgsql')
+ lx = lexer._get_lexer('plpgsql')
# 1 = $, 2 = delimiter, 3 = $
yield (match.start(1), String, match.group(1))
yield (match.start(2), String.Delimiter, match.group(2))
yield (match.start(3), String, match.group(3))
# 4 = string contents
- if lx:
- for x in lx.get_tokens_unprocessed(match.group(4)):
+ if lx:
+ for x in lx.get_tokens_unprocessed(match.group(4)):
yield x
else:
yield (match.start(4), String, match.group(4))
@@ -134,9 +134,9 @@ class PostgresBase(object):
if lang.startswith('pl') and lang.endswith('u'):
tries.append(lang[2:-1])
- for lx in tries:
+ for lx in tries:
try:
- return get_lexer_by_name(lx, **self.options)
+ return get_lexer_by_name(lx, **self.options)
except ClassNotFound:
pass
else:
@@ -163,8 +163,8 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(' + '|'.join(s.replace(" ", r"\s+")
- for s in DATATYPES + PSEUDO_TYPES) + r')\b',
- Name.Builtin),
+ for s in DATATYPES + PSEUDO_TYPES) + r')\b',
+ Name.Builtin),
(words(KEYWORDS, suffix=r'\b'), Keyword),
(r'[+*/<>=~!@#%^&|`?-]+', Operator),
(r'::', Operator), # cast
@@ -212,7 +212,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
mimetypes = ['text/x-plpgsql']
flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
+ tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
# extend the keywords list
for i, pattern in enumerate(tokens['root']):
@@ -246,7 +246,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
aliases = [] # not public
flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
+ tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
tokens['root'].append(
(r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
@@ -260,7 +260,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
(r"[^\s]+", String.Symbol),
]
-
+
re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
re_psql_command = re.compile(r'\s*\\')
re_end_command = re.compile(r';\s*(--.*?)?$')
@@ -385,98 +385,98 @@ class SqlLexer(RegexLexer):
(r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words((
- 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
- 'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
- 'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
- 'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD',
- 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
- 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
- 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
+ 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
+ 'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
+ 'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
+ 'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD',
+ 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
+ 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
+ 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
- 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
- 'CLUSTER', 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION',
- 'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
- 'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT',
- 'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT',
- 'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS',
- 'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA',
- 'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT',
- 'COPY', 'CORRESPONTING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER',
- 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
- 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
- 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
+ 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
+ 'CLUSTER', 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION',
+ 'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
+ 'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT',
+ 'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT',
+ 'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS',
+ 'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA',
+ 'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT',
+ 'COPY', 'CORRESPONTING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER',
+ 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
+ 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
+ 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
- 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
- 'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
- 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
- 'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH',
- 'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION',
- 'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING',
- 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION',
- 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
- 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL',
- 'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE',
- 'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET',
- 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING',
- 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
- 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT',
- 'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX',
- 'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT',
- 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO',
- 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY',
- 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
- 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT',
- 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION',
- 'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE',
- 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN',
- 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH',
- 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB',
- 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT',
- 'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT',
- 'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY',
- 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER',
- 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY',
- 'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE',
- 'PARAMATER_NAME', 'PARAMATER_ORDINAL_POSITION',
- 'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME',
- 'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PLACING',
- 'PLI', 'POSITION', 'POSTFIX', 'PRECISION', 'PREFIX', 'PREORDER',
- 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
- 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF',
- 'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME',
- 'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT',
- 'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE',
- 'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE',
- 'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS',
- 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME',
- 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
- 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER',
- 'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE',
- 'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME',
- 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG',
- 'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN',
- 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN',
- 'SUBLIST', 'SUBSTRING', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
- 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY',
- 'TERMINATE', 'THAN', 'THEN', 'TIMESTAMP', 'TIMEZONE_HOUR',
- 'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSATION',
- 'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE',
- 'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER',
- 'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE',
- 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED',
- 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL',
- 'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG',
- 'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM',
- 'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW',
- 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
- 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
+ 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
+ 'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
+ 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
+ 'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH',
+ 'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION',
+ 'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING',
+ 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION',
+ 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
+ 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL',
+ 'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE',
+ 'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET',
+ 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING',
+ 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
+ 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT',
+ 'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX',
+ 'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT',
+ 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO',
+ 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY',
+ 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
+ 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT',
+ 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION',
+ 'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE',
+ 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN',
+ 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH',
+ 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB',
+ 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT',
+ 'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT',
+ 'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY',
+ 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER',
+ 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY',
+ 'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE',
+ 'PARAMATER_NAME', 'PARAMATER_ORDINAL_POSITION',
+ 'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME',
+ 'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PLACING',
+ 'PLI', 'POSITION', 'POSTFIX', 'PRECISION', 'PREFIX', 'PREORDER',
+ 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
+ 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF',
+ 'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME',
+ 'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT',
+ 'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE',
+ 'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE',
+ 'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS',
+ 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME',
+ 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
+ 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER',
+ 'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE',
+ 'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME',
+ 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG',
+ 'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN',
+ 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN',
+ 'SUBLIST', 'SUBSTRING', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
+ 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY',
+ 'TERMINATE', 'THAN', 'THEN', 'TIMESTAMP', 'TIMEZONE_HOUR',
+ 'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSATION',
+ 'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE',
+ 'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER',
+ 'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE',
+ 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED',
+ 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL',
+ 'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG',
+ 'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM',
+ 'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW',
+ 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
+ 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
Keyword),
(words((
- 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
- 'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
- 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
- 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
+ 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
+ 'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
+ 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
+ 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
Name.Builtin),
(r'[+*/<>=~!@#%^&|`?-]', Operator),
(r'[0-9]+', Number.Integer),
@@ -560,15 +560,15 @@ class TransactSqlLexer(RegexLexer):
rating = 1.0
else:
name_between_backtick_count = len(
- name_between_backtick_re.findall(text))
+ name_between_backtick_re.findall(text))
name_between_bracket_count = len(
name_between_bracket_re.findall(text))
# We need to check if there are any names using
# backticks or brackets, as otherwise both are 0
# and 0 >= 2 * 0, so we would always assume it's true
dialect_name_count = name_between_backtick_count + name_between_bracket_count
- if dialect_name_count >= 1 and \
- name_between_bracket_count >= 2 * name_between_backtick_count:
+ if dialect_name_count >= 1 and \
+ name_between_bracket_count >= 2 * name_between_backtick_count:
# Found at least twice as many [name] as `name`.
rating += 0.5
elif name_between_bracket_count > name_between_backtick_count:
@@ -657,13 +657,13 @@ class MySqlLexer(RegexLexer):
def analyse_text(text):
rating = 0
name_between_backtick_count = len(
- name_between_backtick_re.findall(text))
+ name_between_backtick_re.findall(text))
name_between_bracket_count = len(
name_between_bracket_re.findall(text))
# Same logic as above in the TSQL analysis
dialect_name_count = name_between_backtick_count + name_between_bracket_count
- if dialect_name_count >= 1 and \
- name_between_backtick_count >= 2 * name_between_bracket_count:
+ if dialect_name_count >= 1 and \
+ name_between_backtick_count >= 2 * name_between_bracket_count:
# Found at least twice as many `name` as [name].
rating += 0.5
elif name_between_backtick_count > name_between_bracket_count:
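
Both analyse_text hunks above weigh backtick-quoted names against bracket-quoted names to tell MySQL apart from T-SQL. A standalone sketch of that heuristic follows; the two regexes here are stand-ins for the module-level patterns in sql.py, which are not shown in this diff.

# Standalone sketch of the backtick-vs-bracket heuristic used by the
# TransactSqlLexer / MySqlLexer analyse_text() hunks above.  The two regexes
# below are assumed stand-ins; the real module-level patterns are not shown.
import re

name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`')   # assumed pattern
name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]')  # assumed pattern

def looks_like_mysql(text):
    backticks = len(name_between_backtick_re.findall(text))
    brackets = len(name_between_bracket_re.findall(text))
    # Mirrors the MySQL branch: at least one quoted name, and at least twice
    # as many `name` as [name].
    return (backticks + brackets) >= 1 and backticks >= 2 * brackets

print(looks_like_mysql('SELECT `id` FROM `users`'))   # True
print(looks_like_mysql('SELECT [id] FROM [users]'))   # False
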
diff --git a/contrib/python/Pygments/py2/pygments/lexers/templates.py b/contrib/python/Pygments/py2/pygments/lexers/templates.py
index f891242cb8..edd8179daf 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/templates.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/templates.py
@@ -226,7 +226,7 @@ class VelocityLexer(RegexLexer):
'directiveparams'),
(r'(#\{?)(' + identifier + r')(\}|\b)',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
- (r'\$!?\{?', Punctuation, 'variable')
+ (r'\$!?\{?', Punctuation, 'variable')
],
'variable': [
(identifier, Name.Variable),
@@ -249,7 +249,7 @@ class VelocityLexer(RegexLexer):
(r'\]', Operator, '#pop')
],
'funcparams': [
- (r'\$!?\{?', Punctuation, 'variable'),
+ (r'\$!?\{?', Punctuation, 'variable'),
(r'\s+', Text),
(r'[,:]', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
@@ -274,7 +274,7 @@ class VelocityLexer(RegexLexer):
rv += 0.15
if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
rv += 0.15
- if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
+ if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
r'(\.\w+(\([^)]*\))?)*\}?', text):
rv += 0.01
return rv
@@ -1802,26 +1802,26 @@ class HandlebarsLexer(RegexLexer):
'root': [
(r'[^{]+', Other),
- # Comment start {{! }} or {{!--
+ # Comment start {{! }} or {{!--
(r'\{\{!.*\}\}', Comment),
- # HTML Escaping open {{{expression
+ # HTML Escaping open {{{expression
(r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
-
- # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
- (r'(\{\{)([#~/]+)([^\s}]*)', bygroups(Comment.Preproc, Number.Attribute,Number.Attribute), 'tag'),
+
+ # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
+ (r'(\{\{)([#~/]+)([^\s}]*)', bygroups(Comment.Preproc, Number.Attribute,Number.Attribute), 'tag'),
(r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
],
'tag': [
(r'\s+', Text),
- # HTML Escaping close }}}
+ # HTML Escaping close }}}
(r'\}\}\}', Comment.Special, '#pop'),
- # blockClose}}, includes optional tilde ~
- (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
+ # blockClose}}, includes optional tilde ~
+ (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
# {{opt=something}}
- (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
+ (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
# Partials {{> ...}}
(r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
@@ -1844,7 +1844,7 @@ class HandlebarsLexer(RegexLexer):
include('generic'),
],
'variable': [
- (r'[()/@a-zA-Z][\w-]*', Name.Variable),
+ (r'[()/@a-zA-Z][\w-]*', Name.Variable),
(r'\.[\w-]+', Name.Variable),
(r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
],
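
The HandlebarsLexer hunks above restore the block-open/close rules with the optional tilde and the raw `{{{ }}}` escape. A hedged usage sketch, assuming the `handlebars` alias is registered; the template is illustrative.

# Usage sketch for the HandlebarsLexer hunks above (assumes the 'handlebars'
# alias is registered; the template is illustrative).
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

template = '''\
{{! a comment }}
{{#each items~}}
  <li>{{this}}</li>
{{~/each}}
{{{rawHtml}}}
'''

print(highlight(template, get_lexer_by_name('handlebars'), TerminalFormatter()))
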
diff --git a/contrib/python/Pygments/py2/pygments/lexers/teraterm.py b/contrib/python/Pygments/py2/pygments/lexers/teraterm.py
index 1d7483da24..6a94c8b549 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/teraterm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/teraterm.py
@@ -154,5 +154,5 @@ class TeraTermLexer(RegexLexer):
def analyse_text(text):
result = 0.0
if re.search(TeraTermLexer.tokens['commands'][0][0], text):
- result += 0.01
+ result += 0.01
return result
diff --git a/contrib/python/Pygments/py2/pygments/lexers/textedit.py b/contrib/python/Pygments/py2/pygments/lexers/textedit.py
index 3c6fb570df..f6b38fa200 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/textedit.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/textedit.py
@@ -102,9 +102,9 @@ class VimLexer(RegexLexer):
(r'[ \t]+', Text),
# TODO: regexes can have other delims
- (r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
- (r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
- (r"'[^\n']*(?:''[^\n']*)*'", String.Single),
+ (r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
+ (r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
+ (r"'[^\n']*(?:''[^\n']*)*'", String.Single),
# Who decided that doublequote was a good comment character??
(r'(?<=\s)"[^\-:.%#=*].*', Comment),
diff --git a/contrib/python/Pygments/py2/pygments/lexers/textfmts.py b/contrib/python/Pygments/py2/pygments/lexers/textfmts.py
index d3a191b08f..3a6b47034a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/textfmts.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/textfmts.py
@@ -11,14 +11,14 @@
import re
-from pygments.lexers import guess_lexer, get_lexer_by_name
-from pygments.lexer import RegexLexer, bygroups, default, do_insertions
+from pygments.lexers import guess_lexer, get_lexer_by_name
+from pygments.lexer import RegexLexer, bygroups, default, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Generic, Literal, Punctuation
+ Number, Generic, Literal, Punctuation
from pygments.util import ClassNotFound
-__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
- 'NotmuchLexer']
+__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
+ 'NotmuchLexer']
class IrcLogsLexer(RegexLexer):
@@ -297,86 +297,86 @@ class TodotxtLexer(RegexLexer):
(r'\s+', IncompleteTaskText),
],
}
-
-
-class NotmuchLexer(RegexLexer):
- """
- For `Notmuch <https://notmuchmail.org/>`_ email text format.
-
- .. versionadded:: 2.5
-
- Additional options accepted:
-
- `body_lexer`
- If given, highlight the contents of the message body with the specified
- lexer, else guess it according to the body content (default: ``None``).
- """
-
- name = 'Notmuch'
- aliases = ['notmuch']
-
- def _highlight_code(self, match):
- code = match.group(1)
-
- try:
- if self.body_lexer:
- lexer = get_lexer_by_name(self.body_lexer)
- else:
- lexer = guess_lexer(code.strip())
- except ClassNotFound:
- lexer = get_lexer_by_name('text')
-
- for item in lexer.get_tokens_unprocessed(code):
- yield item
-
- tokens = {
- 'root': [
- (r'\fmessage{\s*', Keyword, ('message', 'message-attr')),
- ],
- 'message-attr': [
- (r'(\s*id:\s*)([^\s]+)', bygroups(Name.Attribute, String)),
- (r'(\s*(?:depth|match|excluded):\s*)(\d+)',
- bygroups(Name.Attribute, Number.Integer)),
- (r'(\s*filename:\s*)(.+\n)',
- bygroups(Name.Attribute, String)),
- default('#pop'),
- ],
- 'message': [
- (r'\fmessage}\n', Keyword, '#pop'),
- (r'\fheader{\n', Keyword, 'header'),
- (r'\fbody{\n', Keyword, 'body'),
- ],
- 'header': [
- (r'\fheader}\n', Keyword, '#pop'),
- (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
- bygroups(Name.Attribute, String)),
- (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
- bygroups(Generic.Strong, Literal, Name.Tag)),
- ],
- 'body': [
- (r'\fpart{\n', Keyword, 'part'),
- (r'\f(part|attachment){\s*', Keyword, ('part', 'part-attr')),
- (r'\fbody}\n', Keyword, '#pop'),
- ],
- 'part-attr': [
- (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
- (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)',
- bygroups(Punctuation, Name.Attribute, String)),
- (r'(,\s*)(Content-type:\s*)(.+\n)',
- bygroups(Punctuation, Name.Attribute, String)),
- default('#pop'),
- ],
- 'part': [
- (r'\f(?:part|attachment)}\n', Keyword, '#pop'),
- (r'\f(?:part|attachment){\s*', Keyword, ('#push', 'part-attr')),
- (r'^Non-text part: .*\n', Comment),
- (r'(?s)(.*?(?=\f(?:part|attachment)}\n))', _highlight_code),
- ],
- }
-
- def analyse_text(text):
- return 1.0 if text.startswith('\fmessage{') else 0.0
-
- def __init__(self, **options):
- self.body_lexer = options.get('body_lexer', None)
- RegexLexer.__init__(self, **options)
+
+
+class NotmuchLexer(RegexLexer):
+ """
+ For `Notmuch <https://notmuchmail.org/>`_ email text format.
+
+ .. versionadded:: 2.5
+
+ Additional options accepted:
+
+ `body_lexer`
+ If given, highlight the contents of the message body with the specified
+ lexer, else guess it according to the body content (default: ``None``).
+ """
+
+ name = 'Notmuch'
+ aliases = ['notmuch']
+
+ def _highlight_code(self, match):
+ code = match.group(1)
+
+ try:
+ if self.body_lexer:
+ lexer = get_lexer_by_name(self.body_lexer)
+ else:
+ lexer = guess_lexer(code.strip())
+ except ClassNotFound:
+ lexer = get_lexer_by_name('text')
+
+ for item in lexer.get_tokens_unprocessed(code):
+ yield item
+
+ tokens = {
+ 'root': [
+ (r'\fmessage{\s*', Keyword, ('message', 'message-attr')),
+ ],
+ 'message-attr': [
+ (r'(\s*id:\s*)([^\s]+)', bygroups(Name.Attribute, String)),
+ (r'(\s*(?:depth|match|excluded):\s*)(\d+)',
+ bygroups(Name.Attribute, Number.Integer)),
+ (r'(\s*filename:\s*)(.+\n)',
+ bygroups(Name.Attribute, String)),
+ default('#pop'),
+ ],
+ 'message': [
+ (r'\fmessage}\n', Keyword, '#pop'),
+ (r'\fheader{\n', Keyword, 'header'),
+ (r'\fbody{\n', Keyword, 'body'),
+ ],
+ 'header': [
+ (r'\fheader}\n', Keyword, '#pop'),
+ (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
+ bygroups(Name.Attribute, String)),
+ (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
+ bygroups(Generic.Strong, Literal, Name.Tag)),
+ ],
+ 'body': [
+ (r'\fpart{\n', Keyword, 'part'),
+ (r'\f(part|attachment){\s*', Keyword, ('part', 'part-attr')),
+ (r'\fbody}\n', Keyword, '#pop'),
+ ],
+ 'part-attr': [
+ (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
+ (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)',
+ bygroups(Punctuation, Name.Attribute, String)),
+ (r'(,\s*)(Content-type:\s*)(.+\n)',
+ bygroups(Punctuation, Name.Attribute, String)),
+ default('#pop'),
+ ],
+ 'part': [
+ (r'\f(?:part|attachment)}\n', Keyword, '#pop'),
+ (r'\f(?:part|attachment){\s*', Keyword, ('#push', 'part-attr')),
+ (r'^Non-text part: .*\n', Comment),
+ (r'(?s)(.*?(?=\f(?:part|attachment)}\n))', _highlight_code),
+ ],
+ }
+
+ def analyse_text(text):
+ return 1.0 if text.startswith('\fmessage{') else 0.0
+
+ def __init__(self, **options):
+ self.body_lexer = options.get('body_lexer', None)
+ RegexLexer.__init__(self, **options)
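
The NotmuchLexer restored above accepts a `body_lexer` option, which `get_lexer_by_name()` passes through to the constructor. A hedged sketch follows; the `\f`-delimited markers mirror the notmuch text format, but the dump itself is abbreviated and illustrative.

# Usage sketch for the NotmuchLexer above (assumes the 'notmuch' alias is
# registered).  The dump below is an abbreviated, illustrative notmuch dump.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

dump = (u'\fmessage{ id:abc123 depth:0 match:1 excluded:0 filename:/tmp/mail\n'
        u'\fheader{\n'
        u'Subject: hello\n'
        u'\fheader}\n'
        u'\fbody{\n'
        u'\fpart{ ID: 1, Content-type: text/plain\n'
        u'print("hi")\n'
        u'\fpart}\n'
        u'\fbody}\n'
        u'\fmessage}\n')

# Highlight the message body with a fixed lexer instead of guessing it.
lexer = get_lexer_by_name('notmuch', body_lexer='python')
print(highlight(dump, lexer, TerminalFormatter()))
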
diff --git a/contrib/python/Pygments/py2/pygments/lexers/zig.py b/contrib/python/Pygments/py2/pygments/lexers/zig.py
index c989386285..cecb4adbd1 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/zig.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/zig.py
@@ -1,129 +1,129 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.zig
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Zig.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ZigLexer']
-
-
-class ZigLexer(RegexLexer):
- """
- For `Zig <http://www.ziglang.org>`_ source code.
-
- grammar: https://ziglang.org/documentation/master/#Grammar
- """
- name = 'Zig'
- aliases = ['zig']
- filenames = ['*.zig']
- mimetypes = ['text/zig']
-
- type_keywords = (
- words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
- 'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize', 'comptime_int',
- 'comptime_float', 'c_short', 'c_ushort', 'c_int', 'c_uint', 'c_long',
- 'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', 'c_void'
- 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128',
- 'u128'), suffix=r'\b'),
- Keyword.Type)
-
- storage_keywords = (
- words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias',
- 'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero',
- 'align', 'linksection', 'threadlocal'), suffix=r'\b'),
- Keyword.Reserved)
-
- structure_keywords = (
- words(('struct', 'enum', 'union', 'error'), suffix=r'\b'),
- Keyword)
-
- statement_keywords = (
- words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer',
- 'unreachable', 'try', 'catch', 'async', 'await', 'suspend',
- 'resume', 'cancel'), suffix=r'\b'),
- Keyword)
-
- conditional_keywords = (
- words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'),
- Keyword)
-
- repeat_keywords = (
- words(('while', 'for'), suffix=r'\b'),
- Keyword)
-
- other_keywords = (
- words(('fn', 'usingnamespace', 'test'), suffix=r'\b'),
- Keyword)
-
- constant_keywords = (
- words(('true', 'false', 'null', 'undefined'), suffix=r'\b'),
- Keyword.Constant)
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
-
- # Keywords
- statement_keywords,
- storage_keywords,
- structure_keywords,
- repeat_keywords,
- type_keywords,
- constant_keywords,
- conditional_keywords,
- other_keywords,
-
- # Floats
- (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float),
- (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float),
- (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float),
- (r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float),
-
- # Integers
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # Identifier
- (r'@[a-zA-Z_]\w*', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
-
- # Characters
- (r'\'\\\'\'', String.Escape),
- (r'\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
- String.Escape),
- (r'\'[^\\\']\'', String),
-
- # Strings
- (r'\\\\[^\n]*', String.Heredoc),
- (r'c\\\\[^\n]*', String.Heredoc),
- (r'c?"', String, 'string'),
-
- # Operators, Punctuation
- (r'[+%=><|^!?/\-*&~:]', Operator),
- (r'[{}()\[\],.;]', Punctuation)
- ],
- 'string': [
- (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])',
- String.Escape),
- (r'[^\\"\n]+', String),
- (r'"', String, '#pop')
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- yield index, token, value
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.zig
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Zig.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['ZigLexer']
+
+
+class ZigLexer(RegexLexer):
+ """
+ For `Zig <http://www.ziglang.org>`_ source code.
+
+ grammar: https://ziglang.org/documentation/master/#Grammar
+ """
+ name = 'Zig'
+ aliases = ['zig']
+ filenames = ['*.zig']
+ mimetypes = ['text/zig']
+
+ type_keywords = (
+ words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
+ 'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize', 'comptime_int',
+ 'comptime_float', 'c_short', 'c_ushort', 'c_int', 'c_uint', 'c_long',
+ 'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', 'c_void',
+ 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128',
+ 'u128'), suffix=r'\b'),
+ Keyword.Type)
+
+ storage_keywords = (
+ words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias',
+ 'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero',
+ 'align', 'linksection', 'threadlocal'), suffix=r'\b'),
+ Keyword.Reserved)
+
+ structure_keywords = (
+ words(('struct', 'enum', 'union', 'error'), suffix=r'\b'),
+ Keyword)
+
+ statement_keywords = (
+ words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer',
+ 'unreachable', 'try', 'catch', 'async', 'await', 'suspend',
+ 'resume', 'cancel'), suffix=r'\b'),
+ Keyword)
+
+ conditional_keywords = (
+ words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'),
+ Keyword)
+
+ repeat_keywords = (
+ words(('while', 'for'), suffix=r'\b'),
+ Keyword)
+
+ other_keywords = (
+ words(('fn', 'usingnamespace', 'test'), suffix=r'\b'),
+ Keyword)
+
+ constant_keywords = (
+ words(('true', 'false', 'null', 'undefined'), suffix=r'\b'),
+ Keyword.Constant)
+
+ tokens = {
+ 'root': [
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'//.*?\n', Comment.Single),
+
+ # Keywords
+ statement_keywords,
+ storage_keywords,
+ structure_keywords,
+ repeat_keywords,
+ type_keywords,
+ constant_keywords,
+ conditional_keywords,
+ other_keywords,
+
+ # Floats
+ (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float),
+ (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float),
+ (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float),
+ (r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float),
+
+ # Integers
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # Identifier
+ (r'@[a-zA-Z_]\w*', Name.Builtin),
+ (r'[a-zA-Z_]\w*', Name),
+
+ # Characters
+ (r'\'\\\'\'', String.Escape),
+ (r'\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
+ String.Escape),
+ (r'\'[^\\\']\'', String),
+
+ # Strings
+ (r'\\\\[^\n]*', String.Heredoc),
+ (r'c\\\\[^\n]*', String.Heredoc),
+ (r'c?"', String, 'string'),
+
+ # Operators, Punctuation
+ (r'[+%=><|^!?/\-*&~:]', Operator),
+ (r'[{}()\[\],.;]', Punctuation)
+ ],
+ 'string': [
+ (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])',
+ String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'"', String, '#pop')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ yield index, token, value
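The re-added pygments/lexers/zig.py above is consumed through the normal Pygments API. As a rough sketch (not part of the commit; the Zig snippet below is made up for illustration, while highlight, ZigLexer and TerminalFormatter all appear elsewhere in this patch):

    # Minimal sketch: run the ZigLexer defined above over a small Zig sample.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.zig import ZigLexer

    zig_source = '''
    const std = @import("std");

    pub fn main() void {
        std.debug.warn("hello\\n", .{});
    }
    '''

    # highlight() feeds the lexer's token stream into the formatter and
    # returns the rendered text when no output file is given.
    print(highlight(zig_source, ZigLexer(), TerminalFormatter()))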
diff --git a/contrib/python/Pygments/py2/pygments/styles/__init__.py b/contrib/python/Pygments/py2/pygments/styles/__init__.py
index c0614718a2..199eb534fd 100644
--- a/contrib/python/Pygments/py2/pygments/styles/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/styles/__init__.py
@@ -50,7 +50,7 @@ STYLE_MAP = {
'stata': 'stata_light::StataLightStyle',
'stata-light': 'stata_light::StataLightStyle',
'stata-dark': 'stata_dark::StataDarkStyle',
- 'inkpot': 'inkpot::InkPotStyle',
+ 'inkpot': 'inkpot::InkPotStyle',
}
diff --git a/contrib/python/Pygments/py2/pygments/styles/inkpot.py b/contrib/python/Pygments/py2/pygments/styles/inkpot.py
index 0b7ea74ed9..c2f98de4e3 100644
--- a/contrib/python/Pygments/py2/pygments/styles/inkpot.py
+++ b/contrib/python/Pygments/py2/pygments/styles/inkpot.py
@@ -1,67 +1,67 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.inkpot
- ~~~~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
-
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Text, Other, Keyword, Name, Comment, String, \
- Error, Number, Operator, Generic, Whitespace, Punctuation
-
-
-class InkPotStyle(Style):
- background_color = "#1e1e27"
- default_style = ""
- styles = {
- Text: "#cfbfad",
- Other: "#cfbfad",
- Whitespace: "#434357",
- Comment: "#cd8b00",
- Comment.Preproc: "#409090",
- Comment.PreprocFile: "bg:#404040 #ffcd8b",
- Comment.Special: "#808bed",
-
- Keyword: "#808bed",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "#ff8bff",
-
- Operator: "#666666",
-
- Punctuation: "#cfbfad",
-
- Name: "#cfbfad",
- Name.Attribute: "#cfbfad",
- Name.Builtin.Pseudo: '#ffff00',
- Name.Builtin: "#808bed",
- Name.Class: "#ff8bff",
- Name.Constant: "#409090",
- Name.Decorator: "#409090",
- Name.Exception: "#ff0000",
- Name.Function: "#c080d0",
- Name.Label: "#808bed",
- Name.Namespace: "#ff0000",
- Name.Variable: "#cfbfad",
-
- String: "bg:#404040 #ffcd8b",
- String.Doc: "#808bed",
-
- Number: "#f0ad6d",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "bg:#6e2e2e #ffffff"
- }
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.inkpot
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
+
+ :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Text, Other, Keyword, Name, Comment, String, \
+ Error, Number, Operator, Generic, Whitespace, Punctuation
+
+
+class InkPotStyle(Style):
+ background_color = "#1e1e27"
+ default_style = ""
+ styles = {
+ Text: "#cfbfad",
+ Other: "#cfbfad",
+ Whitespace: "#434357",
+ Comment: "#cd8b00",
+ Comment.Preproc: "#409090",
+ Comment.PreprocFile: "bg:#404040 #ffcd8b",
+ Comment.Special: "#808bed",
+
+ Keyword: "#808bed",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "#ff8bff",
+
+ Operator: "#666666",
+
+ Punctuation: "#cfbfad",
+
+ Name: "#cfbfad",
+ Name.Attribute: "#cfbfad",
+ Name.Builtin.Pseudo: '#ffff00',
+ Name.Builtin: "#808bed",
+ Name.Class: "#ff8bff",
+ Name.Constant: "#409090",
+ Name.Decorator: "#409090",
+ Name.Exception: "#ff0000",
+ Name.Function: "#c080d0",
+ Name.Label: "#808bed",
+ Name.Namespace: "#ff0000",
+ Name.Variable: "#cfbfad",
+
+ String: "bg:#404040 #ffcd8b",
+ String.Doc: "#808bed",
+
+ Number: "#f0ad6d",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "bg:#6e2e2e #ffffff"
+ }
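For reference, a style such as InkPotStyle above is normally picked up by name through a formatter rather than imported directly. A small sketch (the lexer choice and CSS selector are arbitrary; the helper functions come from the public Pygments API, not from this patch):

    # Sketch: render HTML with the 'inkpot' style registered in STYLE_MAP above.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import PythonLexer
    from pygments.styles import get_style_by_name

    style = get_style_by_name('inkpot')         # resolved via the STYLE_MAP entry
    formatter = HtmlFormatter(style=style)      # passing the string 'inkpot' also works

    html = highlight('print("hi")', PythonLexer(), formatter)
    css = formatter.get_style_defs('.highlight')   # CSS rules derived from the style colors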
diff --git a/contrib/python/Pygments/py2/pygments/styles/monokai.py b/contrib/python/Pygments/py2/pygments/styles/monokai.py
index c9db9f2218..b17124ee48 100644
--- a/contrib/python/Pygments/py2/pygments/styles/monokai.py
+++ b/contrib/python/Pygments/py2/pygments/styles/monokai.py
@@ -92,15 +92,15 @@ class MonokaiStyle(Style):
String.Single: "", # class: 's1'
String.Symbol: "", # class: 'ss'
-
+
Generic: "", # class: 'g'
Generic.Deleted: "#f92672", # class: 'gd',
Generic.Emph: "italic", # class: 'ge'
Generic.Error: "", # class: 'gr'
Generic.Heading: "", # class: 'gh'
Generic.Inserted: "#a6e22e", # class: 'gi'
- Generic.Output: "#66d9ef", # class: 'go'
- Generic.Prompt: "bold #f92672", # class: 'gp'
+ Generic.Output: "#66d9ef", # class: 'go'
+ Generic.Prompt: "bold #f92672", # class: 'gp'
Generic.Strong: "bold", # class: 'gs'
Generic.Subheading: "#75715e", # class: 'gu'
Generic.Traceback: "", # class: 'gt'
diff --git a/contrib/python/Pygments/py2/ya.make b/contrib/python/Pygments/py2/ya.make
index 978caf3d68..cf5d52a2ae 100644
--- a/contrib/python/Pygments/py2/ya.make
+++ b/contrib/python/Pygments/py2/ya.make
@@ -1,15 +1,15 @@
-# Generated by devtools/yamaker (pypi).
-
+# Generated by devtools/yamaker (pypi).
+
PY2_LIBRARY()
-OWNER(blinkov g:python-contrib)
-
-VERSION(2.5.2)
-
-LICENSE(BSD-3-Clause)
+OWNER(blinkov g:python-contrib)
-NO_LINT()
+VERSION(2.5.2)
+LICENSE(BSD-3-Clause)
+
+NO_LINT()
+
NO_CHECK_IMPORTS(
pygments.sphinxext
)
@@ -17,7 +17,7 @@ NO_CHECK_IMPORTS(
PY_SRCS(
TOP_LEVEL
pygments/__init__.py
- pygments/__main__.py
+ pygments/__main__.py
pygments/cmdline.py
pygments/console.py
pygments/filter.py
@@ -89,7 +89,7 @@ PY_SRCS(
pygments/lexers/ecl.py
pygments/lexers/eiffel.py
pygments/lexers/elm.py
- pygments/lexers/email.py
+ pygments/lexers/email.py
pygments/lexers/erlang.py
pygments/lexers/esoteric.py
pygments/lexers/ezhil.py
@@ -126,7 +126,7 @@ PY_SRCS(
pygments/lexers/markup.py
pygments/lexers/math.py
pygments/lexers/matlab.py
- pygments/lexers/mime.py
+ pygments/lexers/mime.py
pygments/lexers/ml.py
pygments/lexers/modeling.py
pygments/lexers/modula2.py
@@ -160,7 +160,7 @@ PY_SRCS(
pygments/lexers/ruby.py
pygments/lexers/rust.py
pygments/lexers/sas.py
- pygments/lexers/scdoc.py
+ pygments/lexers/scdoc.py
pygments/lexers/scripting.py
pygments/lexers/sgf.py
pygments/lexers/shell.py
@@ -168,7 +168,7 @@ PY_SRCS(
pygments/lexers/smalltalk.py
pygments/lexers/smv.py
pygments/lexers/snobol.py
- pygments/lexers/solidity.py
+ pygments/lexers/solidity.py
pygments/lexers/special.py
pygments/lexers/sql.py
pygments/lexers/stata.py
@@ -192,7 +192,7 @@ PY_SRCS(
pygments/lexers/whiley.py
pygments/lexers/x10.py
pygments/lexers/xorg.py
- pygments/lexers/zig.py
+ pygments/lexers/zig.py
pygments/modeline.py
pygments/plugin.py
pygments/regexopt.py
@@ -213,7 +213,7 @@ PY_SRCS(
pygments/styles/friendly.py
pygments/styles/fruity.py
pygments/styles/igor.py
- pygments/styles/inkpot.py
+ pygments/styles/inkpot.py
pygments/styles/lovelace.py
pygments/styles/manni.py
pygments/styles/monokai.py
@@ -240,7 +240,7 @@ PY_SRCS(
)
RESOURCE_FILES(
- PREFIX contrib/python/Pygments/py2/
+ PREFIX contrib/python/Pygments/py2/
.dist-info/METADATA
.dist-info/entry_points.txt
.dist-info/top_level.txt
diff --git a/contrib/python/Pygments/py3/.dist-info/METADATA b/contrib/python/Pygments/py3/.dist-info/METADATA
index d1bac735c8..592fea9fc2 100644
--- a/contrib/python/Pygments/py3/.dist-info/METADATA
+++ b/contrib/python/Pygments/py3/.dist-info/METADATA
@@ -2,7 +2,7 @@ Metadata-Version: 2.1
Name: Pygments
Version: 2.11.2
Summary: Pygments is a syntax highlighting package written in Python.
-Home-page: https://pygments.org/
+Home-page: https://pygments.org/
Author: Georg Brandl
Author-email: georg@python.org
License: BSD License
@@ -12,25 +12,25 @@ Project-URL: Bug Tracker, https://github.com/pygments/pygments/issues
Project-URL: Changelog, https://github.com/pygments/pygments/blob/master/CHANGES
Keywords: syntax highlighting
Platform: any
-Classifier: Development Status :: 6 - Mature
+Classifier: Development Status :: 6 - Mature
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: End Users/Desktop
Classifier: Intended Audience :: System Administrators
-Classifier: License :: OSI Approved :: BSD License
-Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Text Processing :: Filters
Classifier: Topic :: Utilities
-Requires-Python: >=3.5
+Requires-Python: >=3.5
License-File: LICENSE
License-File: AUTHORS
diff --git a/contrib/python/Pygments/py3/AUTHORS b/contrib/python/Pygments/py3/AUTHORS
index d33be64878..dacb141079 100644
--- a/contrib/python/Pygments/py3/AUTHORS
+++ b/contrib/python/Pygments/py3/AUTHORS
@@ -1,257 +1,257 @@
-Pygments is written and maintained by Georg Brandl <georg@python.org>.
-
-Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher
-<armin.ronacher@active-4.com>.
-
-Other contributors, listed alphabetically, are:
-
-* Sam Aaron -- Ioke lexer
+Pygments is written and maintained by Georg Brandl <georg@python.org>.
+
+Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher
+<armin.ronacher@active-4.com>.
+
+Other contributors, listed alphabetically, are:
+
+* Sam Aaron -- Ioke lexer
* Jean Abou Samra -- LilyPond lexer
-* João Abecasis -- JSLT lexer
-* Ali Afshar -- image formatter
-* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript
- lexers
-* Muthiah Annamalai -- Ezhil lexer
-* Kumar Appaiah -- Debian control lexer
-* Andreas Amann -- AppleScript lexer
-* Timothy Armstrong -- Dart lexer fixes
-* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
-* Jeremy Ashkenas -- CoffeeScript lexer
-* José Joaquín Atria -- Praat lexer
-* Stefan Matthias Aust -- Smalltalk lexer
-* Lucas Bajolet -- Nit lexer
-* Ben Bangert -- Mako lexers
-* Max Battcher -- Darcs patch lexer
-* Thomas Baruchel -- APL lexer
-* Tim Baumann -- (Literate) Agda lexer
-* Paul Baumgart, 280 North, Inc. -- Objective-J lexer
-* Michael Bayer -- Myghty lexers
-* Thomas Beale -- Archetype lexers
-* John Benediktsson -- Factor lexer
-* Trevor Bergeron -- mIRC formatter
-* Vincent Bernat -- LessCSS lexer
-* Christopher Bertels -- Fancy lexer
-* Sébastien Bigaret -- QVT Operational lexer
-* Jarrett Billingsley -- MiniD lexer
-* Adam Blinkinsop -- Haskell, Redcode lexers
-* Stéphane Blondon -- Procfile, SGF and Sieve lexers
-* Frits van Bommel -- assembler lexers
-* Pierre Bourdon -- bugfixes
-* Martijn Braam -- Kernel log lexer, BARE lexer
-* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
-* chebee7i -- Python traceback lexer improvements
-* Hiram Chirino -- Scaml and Jade lexers
-* Mauricio Caceres -- SAS and Stata lexers.
-* Ian Cooper -- VGL lexer
-* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
-* Leaf Corcoran -- MoonScript lexer
-* Christopher Creutzig -- MuPAD lexer
-* Daniël W. Crompton -- Pike lexer
-* Pete Curry -- bugfixes
-* Bryan Davis -- EBNF lexer
-* Bruno Deferrari -- Shen lexer
-* Luke Drummond -- Meson lexer
-* Giedrius Dubinskas -- HTML formatter improvements
-* Owen Durni -- Haxe lexer
-* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer
-* James Edwards -- Terraform lexer
-* Nick Efford -- Python 3 lexer
-* Sven Efftinge -- Xtend lexer
-* Artem Egorkine -- terminal256 formatter
-* Matthew Fernandez -- CAmkES lexer
-* Paweł Fertyk -- GDScript lexer, HTML formatter improvements
-* Michael Ficarra -- CPSA lexer
-* James H. Fisher -- PostScript lexer
-* William S. Fulton -- SWIG lexer
-* Carlos Galdino -- Elixir and Elixir Console lexers
-* Michael Galloy -- IDL lexer
-* Naveen Garg -- Autohotkey lexer
-* Simon Garnotel -- FreeFem++ lexer
-* Laurent Gautier -- R/S lexer
-* Alex Gaynor -- PyPy log lexer
-* Richard Gerkin -- Igor Pro lexer
-* Alain Gilbert -- TypeScript lexer
-* Alex Gilding -- BlitzBasic lexer
-* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers
-* Bertrand Goetzmann -- Groovy lexer
-* Krzysiek Goj -- Scala lexer
-* Rostyslav Golda -- FloScript lexer
-* Andrey Golovizin -- BibTeX lexers
-* Matt Good -- Genshi, Cheetah lexers
-* Michał Górny -- vim modeline support
-* Alex Gosse -- TrafficScript lexer
-* Patrick Gotthardt -- PHP namespaces support
-* Hubert Gruniaux -- C and C++ lexer improvements
-* Olivier Guibe -- Asymptote lexer
-* Phil Hagelberg -- Fennel lexer
-* Florian Hahn -- Boogie lexer
-* Martin Harriman -- SNOBOL lexer
-* Matthew Harrison -- SVG formatter
-* Steven Hazel -- Tcl lexer
-* Dan Michael Heggø -- Turtle lexer
-* Aslak Hellesøy -- Gherkin lexer
-* Greg Hendershott -- Racket lexer
-* Justin Hendrick -- ParaSail lexer
-* Jordi Gutiérrez Hermoso -- Octave lexer
-* David Hess, Fish Software, Inc. -- Objective-J lexer
-* Ken Hilton -- Typographic Number Theory and Arrow lexers
-* Varun Hiremath -- Debian control lexer
-* Rob Hoelz -- Perl 6 lexer
-* Doug Hogan -- Mscgen lexer
-* Ben Hollis -- Mason lexer
-* Max Horn -- GAP lexer
-* Fred Hornsey -- OMG IDL Lexer
-* Alastair Houghton -- Lexer inheritance facility
-* Tim Howard -- BlitzMax lexer
-* Dustin Howett -- Logos lexer
-* Ivan Inozemtsev -- Fantom lexer
-* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session,
- MSDOS session, BC, WDiff
-* Brian R. Jackson -- Tea lexer
-* Christian Jann -- ShellSession lexer
-* Dennis Kaarsemaker -- sources.list lexer
-* Dmitri Kabak -- Inferno Limbo lexer
-* Igor Kalnitsky -- vhdl lexer
-* Colin Kennedy - USD lexer
-* Alexander Kit -- MaskJS lexer
-* Pekka Klärck -- Robot Framework lexer
-* Gerwin Klein -- Isabelle lexer
-* Eric Knibbe -- Lasso lexer
-* Stepan Koltsov -- Clay lexer
+* João Abecasis -- JSLT lexer
+* Ali Afshar -- image formatter
+* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript
+ lexers
+* Muthiah Annamalai -- Ezhil lexer
+* Kumar Appaiah -- Debian control lexer
+* Andreas Amann -- AppleScript lexer
+* Timothy Armstrong -- Dart lexer fixes
+* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
+* Jeremy Ashkenas -- CoffeeScript lexer
+* José Joaquín Atria -- Praat lexer
+* Stefan Matthias Aust -- Smalltalk lexer
+* Lucas Bajolet -- Nit lexer
+* Ben Bangert -- Mako lexers
+* Max Battcher -- Darcs patch lexer
+* Thomas Baruchel -- APL lexer
+* Tim Baumann -- (Literate) Agda lexer
+* Paul Baumgart, 280 North, Inc. -- Objective-J lexer
+* Michael Bayer -- Myghty lexers
+* Thomas Beale -- Archetype lexers
+* John Benediktsson -- Factor lexer
+* Trevor Bergeron -- mIRC formatter
+* Vincent Bernat -- LessCSS lexer
+* Christopher Bertels -- Fancy lexer
+* Sébastien Bigaret -- QVT Operational lexer
+* Jarrett Billingsley -- MiniD lexer
+* Adam Blinkinsop -- Haskell, Redcode lexers
+* Stéphane Blondon -- Procfile, SGF and Sieve lexers
+* Frits van Bommel -- assembler lexers
+* Pierre Bourdon -- bugfixes
+* Martijn Braam -- Kernel log lexer, BARE lexer
+* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
+* chebee7i -- Python traceback lexer improvements
+* Hiram Chirino -- Scaml and Jade lexers
+* Mauricio Caceres -- SAS and Stata lexers.
+* Ian Cooper -- VGL lexer
+* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
+* Leaf Corcoran -- MoonScript lexer
+* Christopher Creutzig -- MuPAD lexer
+* Daniël W. Crompton -- Pike lexer
+* Pete Curry -- bugfixes
+* Bryan Davis -- EBNF lexer
+* Bruno Deferrari -- Shen lexer
+* Luke Drummond -- Meson lexer
+* Giedrius Dubinskas -- HTML formatter improvements
+* Owen Durni -- Haxe lexer
+* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer
+* James Edwards -- Terraform lexer
+* Nick Efford -- Python 3 lexer
+* Sven Efftinge -- Xtend lexer
+* Artem Egorkine -- terminal256 formatter
+* Matthew Fernandez -- CAmkES lexer
+* Paweł Fertyk -- GDScript lexer, HTML formatter improvements
+* Michael Ficarra -- CPSA lexer
+* James H. Fisher -- PostScript lexer
+* William S. Fulton -- SWIG lexer
+* Carlos Galdino -- Elixir and Elixir Console lexers
+* Michael Galloy -- IDL lexer
+* Naveen Garg -- Autohotkey lexer
+* Simon Garnotel -- FreeFem++ lexer
+* Laurent Gautier -- R/S lexer
+* Alex Gaynor -- PyPy log lexer
+* Richard Gerkin -- Igor Pro lexer
+* Alain Gilbert -- TypeScript lexer
+* Alex Gilding -- BlitzBasic lexer
+* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers
+* Bertrand Goetzmann -- Groovy lexer
+* Krzysiek Goj -- Scala lexer
+* Rostyslav Golda -- FloScript lexer
+* Andrey Golovizin -- BibTeX lexers
+* Matt Good -- Genshi, Cheetah lexers
+* Michał Górny -- vim modeline support
+* Alex Gosse -- TrafficScript lexer
+* Patrick Gotthardt -- PHP namespaces support
+* Hubert Gruniaux -- C and C++ lexer improvements
+* Olivier Guibe -- Asymptote lexer
+* Phil Hagelberg -- Fennel lexer
+* Florian Hahn -- Boogie lexer
+* Martin Harriman -- SNOBOL lexer
+* Matthew Harrison -- SVG formatter
+* Steven Hazel -- Tcl lexer
+* Dan Michael Heggø -- Turtle lexer
+* Aslak Hellesøy -- Gherkin lexer
+* Greg Hendershott -- Racket lexer
+* Justin Hendrick -- ParaSail lexer
+* Jordi Gutiérrez Hermoso -- Octave lexer
+* David Hess, Fish Software, Inc. -- Objective-J lexer
+* Ken Hilton -- Typographic Number Theory and Arrow lexers
+* Varun Hiremath -- Debian control lexer
+* Rob Hoelz -- Perl 6 lexer
+* Doug Hogan -- Mscgen lexer
+* Ben Hollis -- Mason lexer
+* Max Horn -- GAP lexer
+* Fred Hornsey -- OMG IDL Lexer
+* Alastair Houghton -- Lexer inheritance facility
+* Tim Howard -- BlitzMax lexer
+* Dustin Howett -- Logos lexer
+* Ivan Inozemtsev -- Fantom lexer
+* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session,
+ MSDOS session, BC, WDiff
+* Brian R. Jackson -- Tea lexer
+* Christian Jann -- ShellSession lexer
+* Dennis Kaarsemaker -- sources.list lexer
+* Dmitri Kabak -- Inferno Limbo lexer
+* Igor Kalnitsky -- vhdl lexer
+* Colin Kennedy - USD lexer
+* Alexander Kit -- MaskJS lexer
+* Pekka Klärck -- Robot Framework lexer
+* Gerwin Klein -- Isabelle lexer
+* Eric Knibbe -- Lasso lexer
+* Stepan Koltsov -- Clay lexer
* Oliver Kopp - Friendly grayscale style
-* Adam Koprowski -- Opa lexer
-* Benjamin Kowarsch -- Modula-2 lexer
-* Domen Kožar -- Nix lexer
-* Oleh Krekel -- Emacs Lisp lexer
-* Alexander Kriegisch -- Kconfig and AspectJ lexers
-* Marek Kubica -- Scheme lexer
-* Jochen Kupperschmidt -- Markdown processor
-* Gerd Kurzbach -- Modelica lexer
-* Jon Larimer, Google Inc. -- Smali lexer
-* Olov Lassus -- Dart lexer
-* Matt Layman -- TAP lexer
-* Kristian Lyngstøl -- Varnish lexers
-* Sylvestre Ledru -- Scilab lexer
-* Chee Sing Lee -- Flatline lexer
-* Mark Lee -- Vala lexer
-* Valentin Lorentz -- C++ lexer improvements
-* Ben Mabey -- Gherkin lexer
-* Angus MacArthur -- QML lexer
-* Louis Mandel -- X10 lexer
-* Louis Marchand -- Eiffel lexer
-* Simone Margaritelli -- Hybris lexer
-* Kirk McDonald -- D lexer
-* Gordon McGregor -- SystemVerilog lexer
-* Stephen McKamey -- Duel/JBST lexer
-* Brian McKenna -- F# lexer
-* Charles McLaughlin -- Puppet lexer
-* Kurt McKee -- Tera Term macro lexer, PostgreSQL updates, MySQL overhaul
+* Adam Koprowski -- Opa lexer
+* Benjamin Kowarsch -- Modula-2 lexer
+* Domen Kožar -- Nix lexer
+* Oleh Krekel -- Emacs Lisp lexer
+* Alexander Kriegisch -- Kconfig and AspectJ lexers
+* Marek Kubica -- Scheme lexer
+* Jochen Kupperschmidt -- Markdown processor
+* Gerd Kurzbach -- Modelica lexer
+* Jon Larimer, Google Inc. -- Smali lexer
+* Olov Lassus -- Dart lexer
+* Matt Layman -- TAP lexer
+* Kristian Lyngstøl -- Varnish lexers
+* Sylvestre Ledru -- Scilab lexer
+* Chee Sing Lee -- Flatline lexer
+* Mark Lee -- Vala lexer
+* Valentin Lorentz -- C++ lexer improvements
+* Ben Mabey -- Gherkin lexer
+* Angus MacArthur -- QML lexer
+* Louis Mandel -- X10 lexer
+* Louis Marchand -- Eiffel lexer
+* Simone Margaritelli -- Hybris lexer
+* Kirk McDonald -- D lexer
+* Gordon McGregor -- SystemVerilog lexer
+* Stephen McKamey -- Duel/JBST lexer
+* Brian McKenna -- F# lexer
+* Charles McLaughlin -- Puppet lexer
+* Kurt McKee -- Tera Term macro lexer, PostgreSQL updates, MySQL overhaul
* Joe Eli McIlvain -- Savi lexer
-* Lukas Meuser -- BBCode formatter, Lua lexer
-* Cat Miller -- Pig lexer
-* Paul Miller -- LiveScript lexer
-* Hong Minhee -- HTTP lexer
-* Michael Mior -- Awk lexer
-* Bruce Mitchener -- Dylan lexer rewrite
-* Reuben Morais -- SourcePawn lexer
-* Jon Morton -- Rust lexer
-* Paulo Moura -- Logtalk lexer
-* Mher Movsisyan -- DTD lexer
-* Dejan Muhamedagic -- Crmsh lexer
-* Ana Nelson -- Ragel, ANTLR, R console lexers
-* Kurt Neufeld -- Markdown lexer
-* Nam T. Nguyen -- Monokai style
-* Jesper Noehr -- HTML formatter "anchorlinenos"
-* Mike Nolta -- Julia lexer
-* Avery Nortonsmith -- Pointless lexer
-* Jonas Obrist -- BBCode lexer
-* Edward O'Callaghan -- Cryptol lexer
-* David Oliva -- Rebol lexer
-* Pat Pannuto -- nesC lexer
-* Jon Parise -- Protocol buffers and Thrift lexers
-* Benjamin Peterson -- Test suite refactoring
-* Ronny Pfannschmidt -- BBCode lexer
-* Dominik Picheta -- Nimrod lexer
-* Andrew Pinkham -- RTF Formatter Refactoring
-* Clément Prévost -- UrbiScript lexer
-* Tanner Prynn -- cmdline -x option and loading lexers from files
-* Oleh Prypin -- Crystal lexer (based on Ruby lexer)
-* Xidorn Quan -- Web IDL lexer
-* Elias Rabel -- Fortran fixed form lexer
-* raichoo -- Idris lexer
-* Daniel Ramirez -- GDScript lexer
-* Kashif Rasul -- CUDA lexer
-* Nathan Reed -- HLSL lexer
-* Justin Reidy -- MXML lexer
-* Norman Richards -- JSON lexer
-* Corey Richardson -- Rust lexer updates
-* Lubomir Rintel -- GoodData MAQL and CL lexers
-* Andre Roberge -- Tango style
-* Georg Rollinger -- HSAIL lexer
-* Michiel Roos -- TypoScript lexer
-* Konrad Rudolph -- LaTeX formatter enhancements
-* Mario Ruggier -- Evoque lexers
-* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements
-* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
-* Matteo Sasso -- Common Lisp lexer
-* Joe Schafer -- Ada lexer
-* Max Schillinger -- TiddlyWiki5 lexer
-* Ken Schutte -- Matlab lexers
-* René Schwaiger -- Rainbow Dash style
-* Sebastian Schweizer -- Whiley lexer
-* Tassilo Schweyer -- Io, MOOCode lexers
-* Pablo Seminario -- PromQL lexer
-* Ted Shaw -- AutoIt lexer
-* Joerg Sieker -- ABAP lexer
-* Robert Simmons -- Standard ML lexer
-* Kirill Simonov -- YAML lexer
-* Corbin Simpson -- Monte lexer
-* Ville Skyttä -- ASCII armored lexer
-* Alexander Smishlajev -- Visual FoxPro lexer
-* Steve Spigarelli -- XQuery lexer
-* Jerome St-Louis -- eC lexer
-* Camil Staps -- Clean and NuSMV lexers; Solarized style
-* James Strachan -- Kotlin lexer
-* Tom Stuart -- Treetop lexer
-* Colin Sullivan -- SuperCollider lexer
-* Ben Swift -- Extempore lexer
-* tatt61880 -- Kuin lexer
-* Edoardo Tenani -- Arduino lexer
-* Tiberius Teng -- default style overhaul
-* Jeremy Thurgood -- Erlang, Squid config lexers
-* Brian Tiffin -- OpenCOBOL lexer
-* Bob Tolbert -- Hy lexer
-* Matthias Trute -- Forth lexer
+* Lukas Meuser -- BBCode formatter, Lua lexer
+* Cat Miller -- Pig lexer
+* Paul Miller -- LiveScript lexer
+* Hong Minhee -- HTTP lexer
+* Michael Mior -- Awk lexer
+* Bruce Mitchener -- Dylan lexer rewrite
+* Reuben Morais -- SourcePawn lexer
+* Jon Morton -- Rust lexer
+* Paulo Moura -- Logtalk lexer
+* Mher Movsisyan -- DTD lexer
+* Dejan Muhamedagic -- Crmsh lexer
+* Ana Nelson -- Ragel, ANTLR, R console lexers
+* Kurt Neufeld -- Markdown lexer
+* Nam T. Nguyen -- Monokai style
+* Jesper Noehr -- HTML formatter "anchorlinenos"
+* Mike Nolta -- Julia lexer
+* Avery Nortonsmith -- Pointless lexer
+* Jonas Obrist -- BBCode lexer
+* Edward O'Callaghan -- Cryptol lexer
+* David Oliva -- Rebol lexer
+* Pat Pannuto -- nesC lexer
+* Jon Parise -- Protocol buffers and Thrift lexers
+* Benjamin Peterson -- Test suite refactoring
+* Ronny Pfannschmidt -- BBCode lexer
+* Dominik Picheta -- Nimrod lexer
+* Andrew Pinkham -- RTF Formatter Refactoring
+* Clément Prévost -- UrbiScript lexer
+* Tanner Prynn -- cmdline -x option and loading lexers from files
+* Oleh Prypin -- Crystal lexer (based on Ruby lexer)
+* Xidorn Quan -- Web IDL lexer
+* Elias Rabel -- Fortran fixed form lexer
+* raichoo -- Idris lexer
+* Daniel Ramirez -- GDScript lexer
+* Kashif Rasul -- CUDA lexer
+* Nathan Reed -- HLSL lexer
+* Justin Reidy -- MXML lexer
+* Norman Richards -- JSON lexer
+* Corey Richardson -- Rust lexer updates
+* Lubomir Rintel -- GoodData MAQL and CL lexers
+* Andre Roberge -- Tango style
+* Georg Rollinger -- HSAIL lexer
+* Michiel Roos -- TypoScript lexer
+* Konrad Rudolph -- LaTeX formatter enhancements
+* Mario Ruggier -- Evoque lexers
+* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements
+* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
+* Matteo Sasso -- Common Lisp lexer
+* Joe Schafer -- Ada lexer
+* Max Schillinger -- TiddlyWiki5 lexer
+* Ken Schutte -- Matlab lexers
+* René Schwaiger -- Rainbow Dash style
+* Sebastian Schweizer -- Whiley lexer
+* Tassilo Schweyer -- Io, MOOCode lexers
+* Pablo Seminario -- PromQL lexer
+* Ted Shaw -- AutoIt lexer
+* Joerg Sieker -- ABAP lexer
+* Robert Simmons -- Standard ML lexer
+* Kirill Simonov -- YAML lexer
+* Corbin Simpson -- Monte lexer
+* Ville Skyttä -- ASCII armored lexer
+* Alexander Smishlajev -- Visual FoxPro lexer
+* Steve Spigarelli -- XQuery lexer
+* Jerome St-Louis -- eC lexer
+* Camil Staps -- Clean and NuSMV lexers; Solarized style
+* James Strachan -- Kotlin lexer
+* Tom Stuart -- Treetop lexer
+* Colin Sullivan -- SuperCollider lexer
+* Ben Swift -- Extempore lexer
+* tatt61880 -- Kuin lexer
+* Edoardo Tenani -- Arduino lexer
+* Tiberius Teng -- default style overhaul
+* Jeremy Thurgood -- Erlang, Squid config lexers
+* Brian Tiffin -- OpenCOBOL lexer
+* Bob Tolbert -- Hy lexer
+* Matthias Trute -- Forth lexer
* Tuoa Spi T4 -- Bdd lexer
-* Erick Tryzelaar -- Felix lexer
-* Alexander Udalov -- Kotlin lexer improvements
-* Thomas Van Doren -- Chapel lexer
-* Daniele Varrazzo -- PostgreSQL lexers
-* Abe Voelker -- OpenEdge ABL lexer
-* Pepijn de Vos -- HTML formatter CTags support
-* Matthias Vallentin -- Bro lexer
-* Benoît Vinot -- AMPL lexer
-* Linh Vu Hong -- RSL lexer
-* Immanuel Washington -- Smithy lexer
-* Nathan Weizenbaum -- Haml and Sass lexers
-* Nathan Whetsell -- Csound lexers
-* Dietmar Winkler -- Modelica lexer
-* Nils Winter -- Smalltalk lexer
-* Davy Wybiral -- Clojure lexer
-* Whitney Young -- ObjectiveC lexer
-* Diego Zamboni -- CFengine3 lexer
-* Enrique Zamudio -- Ceylon lexer
-* Alex Zimin -- Nemerle lexer
-* Rob Zimmerman -- Kal lexer
-* Vincent Zurczak -- Roboconf lexer
-* Hubert Gruniaux -- C and C++ lexer improvements
-* Thomas Symalla -- AMDGPU Lexer
-* 15b3 -- Image Formatter improvements
-* Fabian Neumann -- CDDL lexer
-* Thomas Duboucher -- CDDL lexer
-* Philipp Imhof -- Pango Markup formatter
+* Erick Tryzelaar -- Felix lexer
+* Alexander Udalov -- Kotlin lexer improvements
+* Thomas Van Doren -- Chapel lexer
+* Daniele Varrazzo -- PostgreSQL lexers
+* Abe Voelker -- OpenEdge ABL lexer
+* Pepijn de Vos -- HTML formatter CTags support
+* Matthias Vallentin -- Bro lexer
+* Benoît Vinot -- AMPL lexer
+* Linh Vu Hong -- RSL lexer
+* Immanuel Washington -- Smithy lexer
+* Nathan Weizenbaum -- Haml and Sass lexers
+* Nathan Whetsell -- Csound lexers
+* Dietmar Winkler -- Modelica lexer
+* Nils Winter -- Smalltalk lexer
+* Davy Wybiral -- Clojure lexer
+* Whitney Young -- ObjectiveC lexer
+* Diego Zamboni -- CFengine3 lexer
+* Enrique Zamudio -- Ceylon lexer
+* Alex Zimin -- Nemerle lexer
+* Rob Zimmerman -- Kal lexer
+* Vincent Zurczak -- Roboconf lexer
+* Hubert Gruniaux -- C and C++ lexer improvements
+* Thomas Symalla -- AMDGPU Lexer
+* 15b3 -- Image Formatter improvements
+* Fabian Neumann -- CDDL lexer
+* Thomas Duboucher -- CDDL lexer
+* Philipp Imhof -- Pango Markup formatter
* Thomas Voss -- Sed lexer
* Martin Fischer -- WCAG contrast testing
* Marc Auberer -- Spice lexer
-
-Many thanks for all contributions!
+
+Many thanks for all contributions!
diff --git a/contrib/python/Pygments/py3/LICENSE b/contrib/python/Pygments/py3/LICENSE
index e1b15663d9..fb44358e6b 100644
--- a/contrib/python/Pygments/py3/LICENSE
+++ b/contrib/python/Pygments/py3/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2006-2021 by the respective authors (see AUTHORS file).
+Copyright (c) 2006-2021 by the respective authors (see AUTHORS file).
All rights reserved.
Redistribution and use in source and binary forms, with or without
diff --git a/contrib/python/Pygments/py3/README.rst b/contrib/python/Pygments/py3/README.rst
index 77b2d565c0..463f8fc4ed 100644
--- a/contrib/python/Pygments/py3/README.rst
+++ b/contrib/python/Pygments/py3/README.rst
@@ -1,44 +1,44 @@
-Welcome to Pygments
-===================
-
-This is the source of Pygments. It is a **generic syntax highlighter** written
-in Python that supports over 500 languages and text formats, for use in code
-hosting, forums, wikis or other applications that need to prettify source code.
-
-Installing
-----------
-
-... works as usual, use ``pip install Pygments`` to get published versions,
-or ``python setup.py install`` to install from a checkout.
-
-Documentation
--------------
-
-... can be found online at https://pygments.org/ or created with Sphinx by ::
-
- cd doc
- make html
-
-Development
------------
-
-... takes place on `GitHub <https://github.com/pygments/pygments>`_, where the
-Git repository, tickets and pull requests can be viewed.
-
-Continuous testing runs on GitHub workflows:
-
-.. image:: https://github.com/pygments/pygments/workflows/Pygments/badge.svg
- :target: https://github.com/pygments/pygments/actions?query=workflow%3APygments
-
-The authors
------------
-
-Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*
-and **Matthäus Chajdas**.
-
-Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
-the `Pocoo <https://www.pocoo.org/>`_ team and **Tim Hatch**.
-
-The code is distributed under the BSD 2-clause license. Contributors making pull
-requests must agree that they are able and willing to put their contributions
-under that license.
+Welcome to Pygments
+===================
+
+This is the source of Pygments. It is a **generic syntax highlighter** written
+in Python that supports over 500 languages and text formats, for use in code
+hosting, forums, wikis or other applications that need to prettify source code.
+
+Installing
+----------
+
+... works as usual, use ``pip install Pygments`` to get published versions,
+or ``python setup.py install`` to install from a checkout.
+
+Documentation
+-------------
+
+... can be found online at https://pygments.org/ or created with Sphinx by ::
+
+ cd doc
+ make html
+
+Development
+-----------
+
+... takes place on `GitHub <https://github.com/pygments/pygments>`_, where the
+Git repository, tickets and pull requests can be viewed.
+
+Continuous testing runs on GitHub workflows:
+
+.. image:: https://github.com/pygments/pygments/workflows/Pygments/badge.svg
+ :target: https://github.com/pygments/pygments/actions?query=workflow%3APygments
+
+The authors
+-----------
+
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*
+and **Matthäus Chajdas**.
+
+Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
+the `Pocoo <https://www.pocoo.org/>`_ team and **Tim Hatch**.
+
+The code is distributed under the BSD 2-clause license. Contributors making pull
+requests must agree that they are able and willing to put their contributions
+under that license.
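As a library-side counterpart to the README's ``pygmentize`` notes, a minimal sketch of the API it refers to (the output filename and sample input are placeholders, not anything prescribed by the README):

    # Sketch: the basic lexer -> formatter pipeline described in the README.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('python')                 # same names that `pygmentize -L` lists
    formatter = HtmlFormatter(full=True, linenos=True)  # standalone page with line numbers

    with open('example.html', 'w') as out:              # arbitrary output path
        out.write(highlight('print("hello")', lexer, formatter))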
diff --git a/contrib/python/Pygments/py3/pygments/__init__.py b/contrib/python/Pygments/py3/pygments/__init__.py
index 22c50b356a..d827144aa8 100644
--- a/contrib/python/Pygments/py3/pygments/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/__init__.py
@@ -16,15 +16,15 @@
* it is usable as a command-line tool and as a library
* ... and it highlights even Brainfuck!
- The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
+ The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
- .. _Pygments master branch:
- https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
+ .. _Pygments master branch:
+ https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from io import StringIO, BytesIO
+from io import StringIO, BytesIO
__version__ = '2.11.2'
__docformat__ = 'restructuredtext'
diff --git a/contrib/python/Pygments/py3/pygments/__main__.py b/contrib/python/Pygments/py3/pygments/__main__.py
index c6e2517dfe..ed9195552b 100644
--- a/contrib/python/Pygments/py3/pygments/__main__.py
+++ b/contrib/python/Pygments/py3/pygments/__main__.py
@@ -1,17 +1,17 @@
-"""
- pygments.__main__
- ~~~~~~~~~~~~~~~~~
-
- Main entry point for ``python -m pygments``.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-import pygments.cmdline
-
-try:
- sys.exit(pygments.cmdline.main(sys.argv))
-except KeyboardInterrupt:
- sys.exit(1)
+"""
+ pygments.__main__
+ ~~~~~~~~~~~~~~~~~
+
+ Main entry point for ``python -m pygments``.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+import pygments.cmdline
+
+try:
+ sys.exit(pygments.cmdline.main(sys.argv))
+except KeyboardInterrupt:
+ sys.exit(1)
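The __main__ shim above only forwards to pygments.cmdline.main. Roughly speaking, the same entry point can be driven from Python; the sketch below mirrors the main(['', '-V']) call that cmdline.py itself uses, so only the argument list is an illustrative assumption:

    # Sketch: invoke the command-line entry point programmatically.
    # main() takes an argv-style list; element 0 is the program name and is
    # skipped during option parsing (parse_args(args[1:]) in cmdline.py).
    import pygments.cmdline

    exit_code = pygments.cmdline.main(['pygmentize', '-V'])   # prints version information
    print('exit code:', exit_code)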
diff --git a/contrib/python/Pygments/py3/pygments/cmdline.py b/contrib/python/Pygments/py3/pygments/cmdline.py
index 4f688c7f8e..1bc4d8e423 100644
--- a/contrib/python/Pygments/py3/pygments/cmdline.py
+++ b/contrib/python/Pygments/py3/pygments/cmdline.py
@@ -4,20 +4,20 @@
Command line interface.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
import sys
-import shutil
-import argparse
+import shutil
+import argparse
from textwrap import dedent
from pygments import __version__, highlight
from pygments.util import ClassNotFound, OptionError, docstring_headline, \
- guess_decode, guess_decode_from_terminal, terminal_encoding, \
- UnclosingTextIOWrapper
+ guess_decode, guess_decode_from_terminal, terminal_encoding, \
+ UnclosingTextIOWrapper
from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
from pygments.lexers.special import TextLexer
@@ -179,21 +179,21 @@ def _print_list_as_json(requested_items):
json.dump(result, sys.stdout)
-def main_inner(parser, argns):
- if argns.help:
- parser.print_help()
+def main_inner(parser, argns):
+ if argns.help:
+ parser.print_help()
return 0
- if argns.V:
- print('Pygments version %s, (c) 2006-2021 by Georg Brandl, Matthäus '
- 'Chajdas and contributors.' % __version__)
+ if argns.V:
+ print('Pygments version %s, (c) 2006-2021 by Georg Brandl, Matthäus '
+ 'Chajdas and contributors.' % __version__)
return 0
- def is_only_option(opt):
- return not any(v for (k, v) in vars(argns).items() if k != opt)
-
+ def is_only_option(opt):
+ return not any(v for (k, v) in vars(argns).items() if k != opt)
+
# handle ``pygmentize -L``
- if argns.L is not None:
+ if argns.L is not None:
arg_set = set()
for k, v in vars(argns).items():
if v:
@@ -203,19 +203,19 @@ def main_inner(parser, argns):
arg_set.discard('json')
if arg_set:
- parser.print_help(sys.stderr)
+ parser.print_help(sys.stderr)
return 2
# print version
if not argns.json:
main(['', '-V'])
- allowed_types = {'lexer', 'formatter', 'filter', 'style'}
- largs = [arg.rstrip('s') for arg in argns.L]
- if any(arg not in allowed_types for arg in largs):
- parser.print_help(sys.stderr)
- return 0
- if not largs:
- largs = allowed_types
+ allowed_types = {'lexer', 'formatter', 'filter', 'style'}
+ largs = [arg.rstrip('s') for arg in argns.L]
+ if any(arg not in allowed_types for arg in largs):
+ parser.print_help(sys.stderr)
+ return 0
+ if not largs:
+ largs = allowed_types
if not argns.json:
for arg in largs:
_print_list(arg)
@@ -224,21 +224,21 @@ def main_inner(parser, argns):
return 0
# handle ``pygmentize -H``
- if argns.H:
- if not is_only_option('H'):
- parser.print_help(sys.stderr)
+ if argns.H:
+ if not is_only_option('H'):
+ parser.print_help(sys.stderr)
return 2
- what, name = argns.H
+ what, name = argns.H
if what not in ('lexer', 'formatter', 'filter'):
- parser.print_help(sys.stderr)
+ parser.print_help(sys.stderr)
return 2
return _print_help(what, name)
# parse -O options
- parsed_opts = _parse_options(argns.O or [])
+ parsed_opts = _parse_options(argns.O or [])
# parse -P options
- for p_opt in argns.P or []:
+ for p_opt in argns.P or []:
try:
name, value = p_opt.split('=', 1)
except ValueError:
@@ -251,35 +251,35 @@ def main_inner(parser, argns):
outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
# handle ``pygmentize -N``
- if argns.N:
- lexer = find_lexer_class_for_filename(argns.N)
+ if argns.N:
+ lexer = find_lexer_class_for_filename(argns.N)
if lexer is None:
lexer = TextLexer
print(lexer.aliases[0])
return 0
- # handle ``pygmentize -C``
- if argns.C:
- inp = sys.stdin.buffer.read()
- try:
- lexer = guess_lexer(inp, inencoding=inencoding)
- except ClassNotFound:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
+ # handle ``pygmentize -C``
+ if argns.C:
+ inp = sys.stdin.buffer.read()
+ try:
+ lexer = guess_lexer(inp, inencoding=inencoding)
+ except ClassNotFound:
+ lexer = TextLexer
+
+ print(lexer.aliases[0])
+ return 0
+
# handle ``pygmentize -S``
- S_opt = argns.S
- a_opt = argns.a
+ S_opt = argns.S
+ a_opt = argns.a
if S_opt is not None:
- f_opt = argns.f
+ f_opt = argns.f
if not f_opt:
- parser.print_help(sys.stderr)
+ parser.print_help(sys.stderr)
return 2
- if argns.l or argns.INPUTFILE:
- parser.print_help(sys.stderr)
+ if argns.l or argns.INPUTFILE:
+ parser.print_help(sys.stderr)
return 2
try:
@@ -293,36 +293,36 @@ def main_inner(parser, argns):
return 0
# if no -S is given, -a is not allowed
- if argns.a is not None:
- parser.print_help(sys.stderr)
+ if argns.a is not None:
+ parser.print_help(sys.stderr)
return 2
# parse -F options
- F_opts = _parse_filters(argns.F or [])
+ F_opts = _parse_filters(argns.F or [])
# -x: allow custom (eXternal) lexers and formatters
- allow_custom_lexer_formatter = bool(argns.x)
+ allow_custom_lexer_formatter = bool(argns.x)
# select lexer
lexer = None
# given by name?
- lexername = argns.l
+ lexername = argns.l
if lexername:
# custom lexer, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in lexername:
try:
- filename = None
- name = None
+ filename = None
+ name = None
if ':' in lexername:
filename, name = lexername.rsplit(':', 1)
-
- if '.py' in name:
- # This can happen on Windows: If the lexername is
- # C:\lexer.py -- return to normal load path in that case
- name = None
-
- if filename and name:
+
+ if '.py' in name:
+ # This can happen on Windows: If the lexername is
+ # C:\lexer.py -- return to normal load path in that case
+ name = None
+
+ if filename and name:
lexer = load_lexer_from_file(filename, name,
**parsed_opts)
else:
@@ -340,13 +340,13 @@ def main_inner(parser, argns):
# read input code
code = None
- if argns.INPUTFILE:
- if argns.s:
+ if argns.INPUTFILE:
+ if argns.s:
print('Error: -s option not usable when input file specified',
file=sys.stderr)
return 2
- infn = argns.INPUTFILE
+ infn = argns.INPUTFILE
try:
with open(infn, 'rb') as infp:
code = infp.read()
@@ -361,7 +361,7 @@ def main_inner(parser, argns):
try:
lexer = get_lexer_for_filename(infn, code, **parsed_opts)
except ClassNotFound as err:
- if argns.g:
+ if argns.g:
try:
lexer = guess_lexer(code, **parsed_opts)
except ClassNotFound:
@@ -373,10 +373,10 @@ def main_inner(parser, argns):
print('Error:', err, file=sys.stderr)
return 1
- elif not argns.s: # treat stdin as full file (-s support is later)
+ elif not argns.s: # treat stdin as full file (-s support is later)
# read code from terminal, always in binary mode since we want to
# decode ourselves and be tolerant with it
- code = sys.stdin.buffer.read() # use .buffer to get a binary stream
+ code = sys.stdin.buffer.read() # use .buffer to get a binary stream
if not inencoding:
code, inencoding = guess_decode_from_terminal(code, sys.stdin)
# else the lexer will do the decoding
@@ -401,24 +401,24 @@ def main_inner(parser, argns):
return 1
# select formatter
- outfn = argns.o
- fmter = argns.f
+ outfn = argns.o
+ fmter = argns.f
if fmter:
# custom formatter, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in fmter:
try:
- filename = None
- name = None
+ filename = None
+ name = None
if ':' in fmter:
- # Same logic as above for custom lexer
- filename, name = fmter.rsplit(':', 1)
-
- if '.py' in name:
- name = None
-
- if filename and name:
- fmter = load_formatter_from_file(filename, name,
- **parsed_opts)
+ # Same logic as above for custom lexer
+ filename, name = fmter.rsplit(':', 1)
+
+ if '.py' in name:
+ name = None
+
+ if filename and name:
+ fmter = load_formatter_from_file(filename, name,
+ **parsed_opts)
else:
fmter = load_formatter_from_file(fmter, **parsed_opts)
except ClassNotFound as err:
@@ -449,7 +449,7 @@ def main_inner(parser, argns):
fmter = Terminal256Formatter(**parsed_opts)
else:
fmter = TerminalFormatter(**parsed_opts)
- outfile = sys.stdout.buffer
+ outfile = sys.stdout.buffer
# determine output encoding if not explicitly selected
if not outencoding:
@@ -464,8 +464,8 @@ def main_inner(parser, argns):
if not outfn and sys.platform in ('win32', 'cygwin') and \
fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
# unfortunately colorama doesn't support binary streams on Py3
- outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
- fmter.encoding = None
+ outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
+ fmter.encoding = None
try:
import colorama.initialise
except ImportError:
@@ -484,19 +484,19 @@ def main_inner(parser, argns):
lexer = LatexEmbeddedLexer(left, right, lexer)
# ... and do it!
- if not argns.s:
+ if not argns.s:
# process whole input as per normal...
- try:
- highlight(code, lexer, fmter, outfile)
- finally:
- if outfn:
- outfile.close()
+ try:
+ highlight(code, lexer, fmter, outfile)
+ finally:
+ if outfn:
+ outfile.close()
return 0
else:
# line by line processing of stdin (eg: for 'tail -f')...
try:
while 1:
- line = sys.stdin.buffer.readline()
+ line = sys.stdin.buffer.readline()
if not line:
break
if not inencoding:
@@ -507,144 +507,144 @@ def main_inner(parser, argns):
return 0
except KeyboardInterrupt: # pragma: no cover
return 0
- finally:
- if outfn:
- outfile.close()
-
-
-class HelpFormatter(argparse.HelpFormatter):
- def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
- if width is None:
- try:
- width = shutil.get_terminal_size().columns - 2
- except Exception:
- pass
- argparse.HelpFormatter.__init__(self, prog, indent_increment,
- max_help_position, width)
-
-
+ finally:
+ if outfn:
+ outfile.close()
+
+
+class HelpFormatter(argparse.HelpFormatter):
+ def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
+ if width is None:
+ try:
+ width = shutil.get_terminal_size().columns - 2
+ except Exception:
+ pass
+ argparse.HelpFormatter.__init__(self, prog, indent_increment,
+ max_help_position, width)
+
+
def main(args=sys.argv):
"""
Main command line entry point.
"""
- desc = "Highlight an input file and write the result to an output file."
- parser = argparse.ArgumentParser(description=desc, add_help=False,
- formatter_class=HelpFormatter)
-
- operation = parser.add_argument_group('Main operation')
- lexersel = operation.add_mutually_exclusive_group()
- lexersel.add_argument(
- '-l', metavar='LEXER',
- help='Specify the lexer to use. (Query names with -L.) If not '
- 'given and -g is not present, the lexer is guessed from the filename.')
- lexersel.add_argument(
- '-g', action='store_true',
- help='Guess the lexer from the file contents, or pass through '
- 'as plain text if nothing can be guessed.')
- operation.add_argument(
- '-F', metavar='FILTER[:options]', action='append',
- help='Add a filter to the token stream. (Query names with -L.) '
- 'Filter options are given after a colon if necessary.')
- operation.add_argument(
- '-f', metavar='FORMATTER',
- help='Specify the formatter to use. (Query names with -L.) '
- 'If not given, the formatter is guessed from the output filename, '
- 'and defaults to the terminal formatter if the output is to the '
- 'terminal or an unknown file extension.')
- operation.add_argument(
- '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
- help='Give options to the lexer and formatter as a comma-separated '
- 'list of key-value pairs. '
- 'Example: `-O bg=light,python=cool`.')
- operation.add_argument(
- '-P', metavar='OPTION=value', action='append',
- help='Give a single option to the lexer and formatter - with this '
- 'you can pass options whose value contains commas and equal signs. '
- 'Example: `-P "heading=Pygments, the Python highlighter"`.')
- operation.add_argument(
- '-o', metavar='OUTPUTFILE',
- help='Where to write the output. Defaults to standard output.')
-
- operation.add_argument(
- 'INPUTFILE', nargs='?',
- help='Where to read the input. Defaults to standard input.')
-
- flags = parser.add_argument_group('Operation flags')
- flags.add_argument(
- '-v', action='store_true',
- help='Print a detailed traceback on unhandled exceptions, which '
- 'is useful for debugging and bug reports.')
- flags.add_argument(
- '-s', action='store_true',
- help='Process lines one at a time until EOF, rather than waiting to '
- 'process the entire file. This only works for stdin, only for lexers '
- 'with no line-spanning constructs, and is intended for streaming '
- 'input such as you get from `tail -f`. '
- 'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
- flags.add_argument(
- '-x', action='store_true',
- help='Allow custom lexers and formatters to be loaded from a .py file '
- 'relative to the current working directory. For example, '
- '`-l ./customlexer.py -x`. By default, this option expects a file '
- 'with a class named CustomLexer or CustomFormatter; you can also '
- 'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
- 'Users should be very careful not to use this option with untrusted '
- 'files, because it will import and run them.')
+ desc = "Highlight an input file and write the result to an output file."
+ parser = argparse.ArgumentParser(description=desc, add_help=False,
+ formatter_class=HelpFormatter)
+
+ operation = parser.add_argument_group('Main operation')
+ lexersel = operation.add_mutually_exclusive_group()
+ lexersel.add_argument(
+ '-l', metavar='LEXER',
+ help='Specify the lexer to use. (Query names with -L.) If not '
+ 'given and -g is not present, the lexer is guessed from the filename.')
+ lexersel.add_argument(
+ '-g', action='store_true',
+ help='Guess the lexer from the file contents, or pass through '
+ 'as plain text if nothing can be guessed.')
+ operation.add_argument(
+ '-F', metavar='FILTER[:options]', action='append',
+ help='Add a filter to the token stream. (Query names with -L.) '
+ 'Filter options are given after a colon if necessary.')
+ operation.add_argument(
+ '-f', metavar='FORMATTER',
+ help='Specify the formatter to use. (Query names with -L.) '
+ 'If not given, the formatter is guessed from the output filename, '
+ 'and defaults to the terminal formatter if the output is to the '
+ 'terminal or an unknown file extension.')
+ operation.add_argument(
+ '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
+ help='Give options to the lexer and formatter as a comma-separated '
+ 'list of key-value pairs. '
+ 'Example: `-O bg=light,python=cool`.')
+ operation.add_argument(
+ '-P', metavar='OPTION=value', action='append',
+ help='Give a single option to the lexer and formatter - with this '
+ 'you can pass options whose value contains commas and equal signs. '
+ 'Example: `-P "heading=Pygments, the Python highlighter"`.')
+ operation.add_argument(
+ '-o', metavar='OUTPUTFILE',
+ help='Where to write the output. Defaults to standard output.')
+
+ operation.add_argument(
+ 'INPUTFILE', nargs='?',
+ help='Where to read the input. Defaults to standard input.')
+
+ flags = parser.add_argument_group('Operation flags')
+ flags.add_argument(
+ '-v', action='store_true',
+ help='Print a detailed traceback on unhandled exceptions, which '
+ 'is useful for debugging and bug reports.')
+ flags.add_argument(
+ '-s', action='store_true',
+ help='Process lines one at a time until EOF, rather than waiting to '
+ 'process the entire file. This only works for stdin, only for lexers '
+ 'with no line-spanning constructs, and is intended for streaming '
+ 'input such as you get from `tail -f`. '
+ 'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
+ flags.add_argument(
+ '-x', action='store_true',
+ help='Allow custom lexers and formatters to be loaded from a .py file '
+ 'relative to the current working directory. For example, '
+ '`-l ./customlexer.py -x`. By default, this option expects a file '
+ 'with a class named CustomLexer or CustomFormatter; you can also '
+ 'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
+ 'Users should be very careful not to use this option with untrusted '
+ 'files, because it will import and run them.')
 flags.add_argument('--json', help='Output as JSON. This can '
 'only be used in conjunction with -L.',
default=False,
action='store_true')
-
- special_modes_group = parser.add_argument_group(
- 'Special modes - do not do any highlighting')
- special_modes = special_modes_group.add_mutually_exclusive_group()
- special_modes.add_argument(
- '-S', metavar='STYLE -f formatter',
- help='Print style definitions for STYLE for a formatter '
- 'given with -f. The argument given by -a is formatter '
- 'dependent.')
- special_modes.add_argument(
- '-L', nargs='*', metavar='WHAT',
- help='List lexers, formatters, styles or filters -- '
- 'give additional arguments for the thing(s) you want to list '
- '(e.g. "styles"), or omit them to list everything.')
- special_modes.add_argument(
- '-N', metavar='FILENAME',
- help='Guess and print out a lexer name based solely on the given '
- 'filename. Does not take input or highlight anything. If no specific '
- 'lexer can be determined, "text" is printed.')
- special_modes.add_argument(
- '-C', action='store_true',
- help='Like -N, but print out a lexer name based solely on '
- 'a given content from standard input.')
- special_modes.add_argument(
- '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
- help='Print detailed help for the object <name> of type <type>, '
- 'where <type> is one of "lexer", "formatter" or "filter".')
- special_modes.add_argument(
- '-V', action='store_true',
- help='Print the package version.')
- special_modes.add_argument(
- '-h', '--help', action='store_true',
- help='Print this help.')
- special_modes_group.add_argument(
- '-a', metavar='ARG',
- help='Formatter-specific additional argument for the -S (print '
- 'style sheet) mode.')
-
- argns = parser.parse_args(args[1:])
-
+
+ special_modes_group = parser.add_argument_group(
+ 'Special modes - do not do any highlighting')
+ special_modes = special_modes_group.add_mutually_exclusive_group()
+ special_modes.add_argument(
+ '-S', metavar='STYLE -f formatter',
+ help='Print style definitions for STYLE for a formatter '
+ 'given with -f. The argument given by -a is formatter '
+ 'dependent.')
+ special_modes.add_argument(
+ '-L', nargs='*', metavar='WHAT',
+ help='List lexers, formatters, styles or filters -- '
+ 'give additional arguments for the thing(s) you want to list '
+ '(e.g. "styles"), or omit them to list everything.')
+ special_modes.add_argument(
+ '-N', metavar='FILENAME',
+ help='Guess and print out a lexer name based solely on the given '
+ 'filename. Does not take input or highlight anything. If no specific '
+ 'lexer can be determined, "text" is printed.')
+ special_modes.add_argument(
+ '-C', action='store_true',
+ help='Like -N, but print out a lexer name based solely on '
+ 'a given content from standard input.')
+ special_modes.add_argument(
+ '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
+ help='Print detailed help for the object <name> of type <type>, '
+ 'where <type> is one of "lexer", "formatter" or "filter".')
+ special_modes.add_argument(
+ '-V', action='store_true',
+ help='Print the package version.')
+ special_modes.add_argument(
+ '-h', '--help', action='store_true',
+ help='Print this help.')
+ special_modes_group.add_argument(
+ '-a', metavar='ARG',
+ help='Formatter-specific additional argument for the -S (print '
+ 'style sheet) mode.')
+
+ argns = parser.parse_args(args[1:])
+
try:
- return main_inner(parser, argns)
+ return main_inner(parser, argns)
except Exception:
- if argns.v:
+ if argns.v:
print(file=sys.stderr)
print('*' * 65, file=sys.stderr)
print('An unhandled exception occurred while highlighting.',
file=sys.stderr)
print('Please report the whole traceback to the issue tracker at',
file=sys.stderr)
- print('<https://github.com/pygments/pygments/issues>.',
+ print('<https://github.com/pygments/pygments/issues>.',
file=sys.stderr)
print('*' * 65, file=sys.stderr)
print(file=sys.stderr)
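
(Illustrative aside, not part of the diff above.) The `-l ./customlexer.py -x` help text in this hunk corresponds to the load_lexer_from_file() helper in pygments.lexers; a minimal sketch of the equivalent library call follows. The file name and sample code are assumptions for illustration, and the named file is imported and executed, so it must come from a trusted source.

    from pygments import highlight
    from pygments.lexers import load_lexer_from_file
    from pygments.formatters import HtmlFormatter

    # Assumes ./customlexer.py exists and defines a class named CustomLexer
    # (pass lexername='MyLexer' for another class name, mirroring -l ./lexer.py:MyLexer).
    lexer = load_lexer_from_file('./customlexer.py', lexername='CustomLexer')
    print(highlight('print("hi")', lexer, HtmlFormatter()))
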
diff --git a/contrib/python/Pygments/py3/pygments/console.py b/contrib/python/Pygments/py3/pygments/console.py
index 8dd08abebc..9e71ec172e 100644
--- a/contrib/python/Pygments/py3/pygments/console.py
+++ b/contrib/python/Pygments/py3/pygments/console.py
@@ -4,7 +4,7 @@
Format colored console output.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/filter.py b/contrib/python/Pygments/py3/pygments/filter.py
index 85b4829878..f7c063dfa4 100644
--- a/contrib/python/Pygments/py3/pygments/filter.py
+++ b/contrib/python/Pygments/py3/pygments/filter.py
@@ -4,7 +4,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,7 +16,7 @@ def apply_filters(stream, filters, lexer=None):
filter, otherwise the filter receives `None`.
"""
def _apply(filter_, stream):
- yield from filter_.filter(lexer, stream)
+ yield from filter_.filter(lexer, stream)
for filter_ in filters:
stream = _apply(filter_, stream)
return stream
@@ -38,7 +38,7 @@ def simplefilter(f):
})
-class Filter:
+class Filter:
"""
Default filter. Subclass this class or use the `simplefilter`
decorator to create own filters.
@@ -68,4 +68,4 @@ class FunctionFilter(Filter):
def filter(self, lexer, stream):
# pylint: disable=not-callable
- yield from self.function(lexer, stream, self.options)
+ yield from self.function(lexer, stream, self.options)
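
(Illustrative aside, not part of the diff above.) The filter.py hunks show the Filter base class and apply_filters(); the sketch below builds a custom filter on that API. The filter name and behaviour are made up for illustration.

    from pygments.filter import Filter
    from pygments.lexers import PythonLexer
    from pygments.token import Comment

    class DropComments(Filter):
        """Hypothetical filter: drop every comment token from the stream."""
        def filter(self, lexer, stream):
            for ttype, value in stream:
                if ttype not in Comment:
                    yield ttype, value

    lexer = PythonLexer()
    lexer.add_filter(DropComments())  # get_tokens() then runs the stream through apply_filters()
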
diff --git a/contrib/python/Pygments/py3/pygments/filters/__init__.py b/contrib/python/Pygments/py3/pygments/filters/__init__.py
index 930ff64e05..f615c89dfa 100644
--- a/contrib/python/Pygments/py3/pygments/filters/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/filters/__init__.py
@@ -5,7 +5,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -15,7 +15,7 @@ from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
string_to_tokentype
from pygments.filter import Filter
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
- get_choice_opt, ClassNotFound, OptionError
+ get_choice_opt, ClassNotFound, OptionError
from pygments.plugin import find_plugin_filters
@@ -44,7 +44,7 @@ def get_filter_by_name(filtername, **options):
def get_all_filters():
"""Return a generator of all filter names."""
- yield from FILTERS
+ yield from FILTERS
for name, _ in find_plugin_filters():
yield name
@@ -86,601 +86,601 @@ class CodeTagFilter(Filter):
if ttype in String.Doc or \
ttype in Comment and \
ttype not in Comment.Preproc:
- yield from _replace_special(ttype, value, regex, Comment.Special)
- else:
- yield ttype, value
-
-
-class SymbolFilter(Filter):
- """Convert mathematical symbols such as \\<longrightarrow> in Isabelle
- or \\longrightarrow in LaTeX into Unicode characters.
-
- This is mostly useful for HTML or console output when you want to
- approximate the source rendering you'd see in an IDE.
-
- Options accepted:
-
- `lang` : string
- The symbol language. Must be one of ``'isabelle'`` or
- ``'latex'``. The default is ``'isabelle'``.
- """
-
- latex_symbols = {
- '\\alpha' : '\U000003b1',
- '\\beta' : '\U000003b2',
- '\\gamma' : '\U000003b3',
- '\\delta' : '\U000003b4',
- '\\varepsilon' : '\U000003b5',
- '\\zeta' : '\U000003b6',
- '\\eta' : '\U000003b7',
- '\\vartheta' : '\U000003b8',
- '\\iota' : '\U000003b9',
- '\\kappa' : '\U000003ba',
- '\\lambda' : '\U000003bb',
- '\\mu' : '\U000003bc',
- '\\nu' : '\U000003bd',
- '\\xi' : '\U000003be',
- '\\pi' : '\U000003c0',
- '\\varrho' : '\U000003c1',
- '\\sigma' : '\U000003c3',
- '\\tau' : '\U000003c4',
- '\\upsilon' : '\U000003c5',
- '\\varphi' : '\U000003c6',
- '\\chi' : '\U000003c7',
- '\\psi' : '\U000003c8',
- '\\omega' : '\U000003c9',
- '\\Gamma' : '\U00000393',
- '\\Delta' : '\U00000394',
- '\\Theta' : '\U00000398',
- '\\Lambda' : '\U0000039b',
- '\\Xi' : '\U0000039e',
- '\\Pi' : '\U000003a0',
- '\\Sigma' : '\U000003a3',
- '\\Upsilon' : '\U000003a5',
- '\\Phi' : '\U000003a6',
- '\\Psi' : '\U000003a8',
- '\\Omega' : '\U000003a9',
- '\\leftarrow' : '\U00002190',
- '\\longleftarrow' : '\U000027f5',
- '\\rightarrow' : '\U00002192',
- '\\longrightarrow' : '\U000027f6',
- '\\Leftarrow' : '\U000021d0',
- '\\Longleftarrow' : '\U000027f8',
- '\\Rightarrow' : '\U000021d2',
- '\\Longrightarrow' : '\U000027f9',
- '\\leftrightarrow' : '\U00002194',
- '\\longleftrightarrow' : '\U000027f7',
- '\\Leftrightarrow' : '\U000021d4',
- '\\Longleftrightarrow' : '\U000027fa',
- '\\mapsto' : '\U000021a6',
- '\\longmapsto' : '\U000027fc',
- '\\relbar' : '\U00002500',
- '\\Relbar' : '\U00002550',
- '\\hookleftarrow' : '\U000021a9',
- '\\hookrightarrow' : '\U000021aa',
- '\\leftharpoondown' : '\U000021bd',
- '\\rightharpoondown' : '\U000021c1',
- '\\leftharpoonup' : '\U000021bc',
- '\\rightharpoonup' : '\U000021c0',
- '\\rightleftharpoons' : '\U000021cc',
- '\\leadsto' : '\U0000219d',
- '\\downharpoonleft' : '\U000021c3',
- '\\downharpoonright' : '\U000021c2',
- '\\upharpoonleft' : '\U000021bf',
- '\\upharpoonright' : '\U000021be',
- '\\restriction' : '\U000021be',
- '\\uparrow' : '\U00002191',
- '\\Uparrow' : '\U000021d1',
- '\\downarrow' : '\U00002193',
- '\\Downarrow' : '\U000021d3',
- '\\updownarrow' : '\U00002195',
- '\\Updownarrow' : '\U000021d5',
- '\\langle' : '\U000027e8',
- '\\rangle' : '\U000027e9',
- '\\lceil' : '\U00002308',
- '\\rceil' : '\U00002309',
- '\\lfloor' : '\U0000230a',
- '\\rfloor' : '\U0000230b',
- '\\flqq' : '\U000000ab',
- '\\frqq' : '\U000000bb',
- '\\bot' : '\U000022a5',
- '\\top' : '\U000022a4',
- '\\wedge' : '\U00002227',
- '\\bigwedge' : '\U000022c0',
- '\\vee' : '\U00002228',
- '\\bigvee' : '\U000022c1',
- '\\forall' : '\U00002200',
- '\\exists' : '\U00002203',
- '\\nexists' : '\U00002204',
- '\\neg' : '\U000000ac',
- '\\Box' : '\U000025a1',
- '\\Diamond' : '\U000025c7',
- '\\vdash' : '\U000022a2',
- '\\models' : '\U000022a8',
- '\\dashv' : '\U000022a3',
- '\\surd' : '\U0000221a',
- '\\le' : '\U00002264',
- '\\ge' : '\U00002265',
- '\\ll' : '\U0000226a',
- '\\gg' : '\U0000226b',
- '\\lesssim' : '\U00002272',
- '\\gtrsim' : '\U00002273',
- '\\lessapprox' : '\U00002a85',
- '\\gtrapprox' : '\U00002a86',
- '\\in' : '\U00002208',
- '\\notin' : '\U00002209',
- '\\subset' : '\U00002282',
- '\\supset' : '\U00002283',
- '\\subseteq' : '\U00002286',
- '\\supseteq' : '\U00002287',
- '\\sqsubset' : '\U0000228f',
- '\\sqsupset' : '\U00002290',
- '\\sqsubseteq' : '\U00002291',
- '\\sqsupseteq' : '\U00002292',
- '\\cap' : '\U00002229',
- '\\bigcap' : '\U000022c2',
- '\\cup' : '\U0000222a',
- '\\bigcup' : '\U000022c3',
- '\\sqcup' : '\U00002294',
- '\\bigsqcup' : '\U00002a06',
- '\\sqcap' : '\U00002293',
- '\\Bigsqcap' : '\U00002a05',
- '\\setminus' : '\U00002216',
- '\\propto' : '\U0000221d',
- '\\uplus' : '\U0000228e',
- '\\bigplus' : '\U00002a04',
- '\\sim' : '\U0000223c',
- '\\doteq' : '\U00002250',
- '\\simeq' : '\U00002243',
- '\\approx' : '\U00002248',
- '\\asymp' : '\U0000224d',
- '\\cong' : '\U00002245',
- '\\equiv' : '\U00002261',
- '\\Join' : '\U000022c8',
- '\\bowtie' : '\U00002a1d',
- '\\prec' : '\U0000227a',
- '\\succ' : '\U0000227b',
- '\\preceq' : '\U0000227c',
- '\\succeq' : '\U0000227d',
- '\\parallel' : '\U00002225',
- '\\mid' : '\U000000a6',
- '\\pm' : '\U000000b1',
- '\\mp' : '\U00002213',
- '\\times' : '\U000000d7',
- '\\div' : '\U000000f7',
- '\\cdot' : '\U000022c5',
- '\\star' : '\U000022c6',
- '\\circ' : '\U00002218',
- '\\dagger' : '\U00002020',
- '\\ddagger' : '\U00002021',
- '\\lhd' : '\U000022b2',
- '\\rhd' : '\U000022b3',
- '\\unlhd' : '\U000022b4',
- '\\unrhd' : '\U000022b5',
- '\\triangleleft' : '\U000025c3',
- '\\triangleright' : '\U000025b9',
- '\\triangle' : '\U000025b3',
- '\\triangleq' : '\U0000225c',
- '\\oplus' : '\U00002295',
- '\\bigoplus' : '\U00002a01',
- '\\otimes' : '\U00002297',
- '\\bigotimes' : '\U00002a02',
- '\\odot' : '\U00002299',
- '\\bigodot' : '\U00002a00',
- '\\ominus' : '\U00002296',
- '\\oslash' : '\U00002298',
- '\\dots' : '\U00002026',
- '\\cdots' : '\U000022ef',
- '\\sum' : '\U00002211',
- '\\prod' : '\U0000220f',
- '\\coprod' : '\U00002210',
- '\\infty' : '\U0000221e',
- '\\int' : '\U0000222b',
- '\\oint' : '\U0000222e',
- '\\clubsuit' : '\U00002663',
- '\\diamondsuit' : '\U00002662',
- '\\heartsuit' : '\U00002661',
- '\\spadesuit' : '\U00002660',
- '\\aleph' : '\U00002135',
- '\\emptyset' : '\U00002205',
- '\\nabla' : '\U00002207',
- '\\partial' : '\U00002202',
- '\\flat' : '\U0000266d',
- '\\natural' : '\U0000266e',
- '\\sharp' : '\U0000266f',
- '\\angle' : '\U00002220',
- '\\copyright' : '\U000000a9',
- '\\textregistered' : '\U000000ae',
- '\\textonequarter' : '\U000000bc',
- '\\textonehalf' : '\U000000bd',
- '\\textthreequarters' : '\U000000be',
- '\\textordfeminine' : '\U000000aa',
- '\\textordmasculine' : '\U000000ba',
- '\\euro' : '\U000020ac',
- '\\pounds' : '\U000000a3',
- '\\yen' : '\U000000a5',
- '\\textcent' : '\U000000a2',
- '\\textcurrency' : '\U000000a4',
- '\\textdegree' : '\U000000b0',
- }
-
- isabelle_symbols = {
- '\\<zero>' : '\U0001d7ec',
- '\\<one>' : '\U0001d7ed',
- '\\<two>' : '\U0001d7ee',
- '\\<three>' : '\U0001d7ef',
- '\\<four>' : '\U0001d7f0',
- '\\<five>' : '\U0001d7f1',
- '\\<six>' : '\U0001d7f2',
- '\\<seven>' : '\U0001d7f3',
- '\\<eight>' : '\U0001d7f4',
- '\\<nine>' : '\U0001d7f5',
- '\\<A>' : '\U0001d49c',
- '\\<B>' : '\U0000212c',
- '\\<C>' : '\U0001d49e',
- '\\<D>' : '\U0001d49f',
- '\\<E>' : '\U00002130',
- '\\<F>' : '\U00002131',
- '\\<G>' : '\U0001d4a2',
- '\\<H>' : '\U0000210b',
- '\\<I>' : '\U00002110',
- '\\<J>' : '\U0001d4a5',
- '\\<K>' : '\U0001d4a6',
- '\\<L>' : '\U00002112',
- '\\<M>' : '\U00002133',
- '\\<N>' : '\U0001d4a9',
- '\\<O>' : '\U0001d4aa',
- '\\<P>' : '\U0001d4ab',
- '\\<Q>' : '\U0001d4ac',
- '\\<R>' : '\U0000211b',
- '\\<S>' : '\U0001d4ae',
- '\\<T>' : '\U0001d4af',
- '\\<U>' : '\U0001d4b0',
- '\\<V>' : '\U0001d4b1',
- '\\<W>' : '\U0001d4b2',
- '\\<X>' : '\U0001d4b3',
- '\\<Y>' : '\U0001d4b4',
- '\\<Z>' : '\U0001d4b5',
- '\\<a>' : '\U0001d5ba',
- '\\<b>' : '\U0001d5bb',
- '\\<c>' : '\U0001d5bc',
- '\\<d>' : '\U0001d5bd',
- '\\<e>' : '\U0001d5be',
- '\\<f>' : '\U0001d5bf',
- '\\<g>' : '\U0001d5c0',
- '\\<h>' : '\U0001d5c1',
- '\\<i>' : '\U0001d5c2',
- '\\<j>' : '\U0001d5c3',
- '\\<k>' : '\U0001d5c4',
- '\\<l>' : '\U0001d5c5',
- '\\<m>' : '\U0001d5c6',
- '\\<n>' : '\U0001d5c7',
- '\\<o>' : '\U0001d5c8',
- '\\<p>' : '\U0001d5c9',
- '\\<q>' : '\U0001d5ca',
- '\\<r>' : '\U0001d5cb',
- '\\<s>' : '\U0001d5cc',
- '\\<t>' : '\U0001d5cd',
- '\\<u>' : '\U0001d5ce',
- '\\<v>' : '\U0001d5cf',
- '\\<w>' : '\U0001d5d0',
- '\\<x>' : '\U0001d5d1',
- '\\<y>' : '\U0001d5d2',
- '\\<z>' : '\U0001d5d3',
- '\\<AA>' : '\U0001d504',
- '\\<BB>' : '\U0001d505',
- '\\<CC>' : '\U0000212d',
- '\\<DD>' : '\U0001d507',
- '\\<EE>' : '\U0001d508',
- '\\<FF>' : '\U0001d509',
- '\\<GG>' : '\U0001d50a',
- '\\<HH>' : '\U0000210c',
- '\\<II>' : '\U00002111',
- '\\<JJ>' : '\U0001d50d',
- '\\<KK>' : '\U0001d50e',
- '\\<LL>' : '\U0001d50f',
- '\\<MM>' : '\U0001d510',
- '\\<NN>' : '\U0001d511',
- '\\<OO>' : '\U0001d512',
- '\\<PP>' : '\U0001d513',
- '\\<QQ>' : '\U0001d514',
- '\\<RR>' : '\U0000211c',
- '\\<SS>' : '\U0001d516',
- '\\<TT>' : '\U0001d517',
- '\\<UU>' : '\U0001d518',
- '\\<VV>' : '\U0001d519',
- '\\<WW>' : '\U0001d51a',
- '\\<XX>' : '\U0001d51b',
- '\\<YY>' : '\U0001d51c',
- '\\<ZZ>' : '\U00002128',
- '\\<aa>' : '\U0001d51e',
- '\\<bb>' : '\U0001d51f',
- '\\<cc>' : '\U0001d520',
- '\\<dd>' : '\U0001d521',
- '\\<ee>' : '\U0001d522',
- '\\<ff>' : '\U0001d523',
- '\\<gg>' : '\U0001d524',
- '\\<hh>' : '\U0001d525',
- '\\<ii>' : '\U0001d526',
- '\\<jj>' : '\U0001d527',
- '\\<kk>' : '\U0001d528',
- '\\<ll>' : '\U0001d529',
- '\\<mm>' : '\U0001d52a',
- '\\<nn>' : '\U0001d52b',
- '\\<oo>' : '\U0001d52c',
- '\\<pp>' : '\U0001d52d',
- '\\<qq>' : '\U0001d52e',
- '\\<rr>' : '\U0001d52f',
- '\\<ss>' : '\U0001d530',
- '\\<tt>' : '\U0001d531',
- '\\<uu>' : '\U0001d532',
- '\\<vv>' : '\U0001d533',
- '\\<ww>' : '\U0001d534',
- '\\<xx>' : '\U0001d535',
- '\\<yy>' : '\U0001d536',
- '\\<zz>' : '\U0001d537',
- '\\<alpha>' : '\U000003b1',
- '\\<beta>' : '\U000003b2',
- '\\<gamma>' : '\U000003b3',
- '\\<delta>' : '\U000003b4',
- '\\<epsilon>' : '\U000003b5',
- '\\<zeta>' : '\U000003b6',
- '\\<eta>' : '\U000003b7',
- '\\<theta>' : '\U000003b8',
- '\\<iota>' : '\U000003b9',
- '\\<kappa>' : '\U000003ba',
- '\\<lambda>' : '\U000003bb',
- '\\<mu>' : '\U000003bc',
- '\\<nu>' : '\U000003bd',
- '\\<xi>' : '\U000003be',
- '\\<pi>' : '\U000003c0',
- '\\<rho>' : '\U000003c1',
- '\\<sigma>' : '\U000003c3',
- '\\<tau>' : '\U000003c4',
- '\\<upsilon>' : '\U000003c5',
- '\\<phi>' : '\U000003c6',
- '\\<chi>' : '\U000003c7',
- '\\<psi>' : '\U000003c8',
- '\\<omega>' : '\U000003c9',
- '\\<Gamma>' : '\U00000393',
- '\\<Delta>' : '\U00000394',
- '\\<Theta>' : '\U00000398',
- '\\<Lambda>' : '\U0000039b',
- '\\<Xi>' : '\U0000039e',
- '\\<Pi>' : '\U000003a0',
- '\\<Sigma>' : '\U000003a3',
- '\\<Upsilon>' : '\U000003a5',
- '\\<Phi>' : '\U000003a6',
- '\\<Psi>' : '\U000003a8',
- '\\<Omega>' : '\U000003a9',
- '\\<bool>' : '\U0001d539',
- '\\<complex>' : '\U00002102',
- '\\<nat>' : '\U00002115',
- '\\<rat>' : '\U0000211a',
- '\\<real>' : '\U0000211d',
- '\\<int>' : '\U00002124',
- '\\<leftarrow>' : '\U00002190',
- '\\<longleftarrow>' : '\U000027f5',
- '\\<rightarrow>' : '\U00002192',
- '\\<longrightarrow>' : '\U000027f6',
- '\\<Leftarrow>' : '\U000021d0',
- '\\<Longleftarrow>' : '\U000027f8',
- '\\<Rightarrow>' : '\U000021d2',
- '\\<Longrightarrow>' : '\U000027f9',
- '\\<leftrightarrow>' : '\U00002194',
- '\\<longleftrightarrow>' : '\U000027f7',
- '\\<Leftrightarrow>' : '\U000021d4',
- '\\<Longleftrightarrow>' : '\U000027fa',
- '\\<mapsto>' : '\U000021a6',
- '\\<longmapsto>' : '\U000027fc',
- '\\<midarrow>' : '\U00002500',
- '\\<Midarrow>' : '\U00002550',
- '\\<hookleftarrow>' : '\U000021a9',
- '\\<hookrightarrow>' : '\U000021aa',
- '\\<leftharpoondown>' : '\U000021bd',
- '\\<rightharpoondown>' : '\U000021c1',
- '\\<leftharpoonup>' : '\U000021bc',
- '\\<rightharpoonup>' : '\U000021c0',
- '\\<rightleftharpoons>' : '\U000021cc',
- '\\<leadsto>' : '\U0000219d',
- '\\<downharpoonleft>' : '\U000021c3',
- '\\<downharpoonright>' : '\U000021c2',
- '\\<upharpoonleft>' : '\U000021bf',
- '\\<upharpoonright>' : '\U000021be',
- '\\<restriction>' : '\U000021be',
- '\\<Colon>' : '\U00002237',
- '\\<up>' : '\U00002191',
- '\\<Up>' : '\U000021d1',
- '\\<down>' : '\U00002193',
- '\\<Down>' : '\U000021d3',
- '\\<updown>' : '\U00002195',
- '\\<Updown>' : '\U000021d5',
- '\\<langle>' : '\U000027e8',
- '\\<rangle>' : '\U000027e9',
- '\\<lceil>' : '\U00002308',
- '\\<rceil>' : '\U00002309',
- '\\<lfloor>' : '\U0000230a',
- '\\<rfloor>' : '\U0000230b',
- '\\<lparr>' : '\U00002987',
- '\\<rparr>' : '\U00002988',
- '\\<lbrakk>' : '\U000027e6',
- '\\<rbrakk>' : '\U000027e7',
- '\\<lbrace>' : '\U00002983',
- '\\<rbrace>' : '\U00002984',
- '\\<guillemotleft>' : '\U000000ab',
- '\\<guillemotright>' : '\U000000bb',
- '\\<bottom>' : '\U000022a5',
- '\\<top>' : '\U000022a4',
- '\\<and>' : '\U00002227',
- '\\<And>' : '\U000022c0',
- '\\<or>' : '\U00002228',
- '\\<Or>' : '\U000022c1',
- '\\<forall>' : '\U00002200',
- '\\<exists>' : '\U00002203',
- '\\<nexists>' : '\U00002204',
- '\\<not>' : '\U000000ac',
- '\\<box>' : '\U000025a1',
- '\\<diamond>' : '\U000025c7',
- '\\<turnstile>' : '\U000022a2',
- '\\<Turnstile>' : '\U000022a8',
- '\\<tturnstile>' : '\U000022a9',
- '\\<TTurnstile>' : '\U000022ab',
- '\\<stileturn>' : '\U000022a3',
- '\\<surd>' : '\U0000221a',
- '\\<le>' : '\U00002264',
- '\\<ge>' : '\U00002265',
- '\\<lless>' : '\U0000226a',
- '\\<ggreater>' : '\U0000226b',
- '\\<lesssim>' : '\U00002272',
- '\\<greatersim>' : '\U00002273',
- '\\<lessapprox>' : '\U00002a85',
- '\\<greaterapprox>' : '\U00002a86',
- '\\<in>' : '\U00002208',
- '\\<notin>' : '\U00002209',
- '\\<subset>' : '\U00002282',
- '\\<supset>' : '\U00002283',
- '\\<subseteq>' : '\U00002286',
- '\\<supseteq>' : '\U00002287',
- '\\<sqsubset>' : '\U0000228f',
- '\\<sqsupset>' : '\U00002290',
- '\\<sqsubseteq>' : '\U00002291',
- '\\<sqsupseteq>' : '\U00002292',
- '\\<inter>' : '\U00002229',
- '\\<Inter>' : '\U000022c2',
- '\\<union>' : '\U0000222a',
- '\\<Union>' : '\U000022c3',
- '\\<squnion>' : '\U00002294',
- '\\<Squnion>' : '\U00002a06',
- '\\<sqinter>' : '\U00002293',
- '\\<Sqinter>' : '\U00002a05',
- '\\<setminus>' : '\U00002216',
- '\\<propto>' : '\U0000221d',
- '\\<uplus>' : '\U0000228e',
- '\\<Uplus>' : '\U00002a04',
- '\\<noteq>' : '\U00002260',
- '\\<sim>' : '\U0000223c',
- '\\<doteq>' : '\U00002250',
- '\\<simeq>' : '\U00002243',
- '\\<approx>' : '\U00002248',
- '\\<asymp>' : '\U0000224d',
- '\\<cong>' : '\U00002245',
- '\\<smile>' : '\U00002323',
- '\\<equiv>' : '\U00002261',
- '\\<frown>' : '\U00002322',
- '\\<Join>' : '\U000022c8',
- '\\<bowtie>' : '\U00002a1d',
- '\\<prec>' : '\U0000227a',
- '\\<succ>' : '\U0000227b',
- '\\<preceq>' : '\U0000227c',
- '\\<succeq>' : '\U0000227d',
- '\\<parallel>' : '\U00002225',
- '\\<bar>' : '\U000000a6',
- '\\<plusminus>' : '\U000000b1',
- '\\<minusplus>' : '\U00002213',
- '\\<times>' : '\U000000d7',
- '\\<div>' : '\U000000f7',
- '\\<cdot>' : '\U000022c5',
- '\\<star>' : '\U000022c6',
- '\\<bullet>' : '\U00002219',
- '\\<circ>' : '\U00002218',
- '\\<dagger>' : '\U00002020',
- '\\<ddagger>' : '\U00002021',
- '\\<lhd>' : '\U000022b2',
- '\\<rhd>' : '\U000022b3',
- '\\<unlhd>' : '\U000022b4',
- '\\<unrhd>' : '\U000022b5',
- '\\<triangleleft>' : '\U000025c3',
- '\\<triangleright>' : '\U000025b9',
- '\\<triangle>' : '\U000025b3',
- '\\<triangleq>' : '\U0000225c',
- '\\<oplus>' : '\U00002295',
- '\\<Oplus>' : '\U00002a01',
- '\\<otimes>' : '\U00002297',
- '\\<Otimes>' : '\U00002a02',
- '\\<odot>' : '\U00002299',
- '\\<Odot>' : '\U00002a00',
- '\\<ominus>' : '\U00002296',
- '\\<oslash>' : '\U00002298',
- '\\<dots>' : '\U00002026',
- '\\<cdots>' : '\U000022ef',
- '\\<Sum>' : '\U00002211',
- '\\<Prod>' : '\U0000220f',
- '\\<Coprod>' : '\U00002210',
- '\\<infinity>' : '\U0000221e',
- '\\<integral>' : '\U0000222b',
- '\\<ointegral>' : '\U0000222e',
- '\\<clubsuit>' : '\U00002663',
- '\\<diamondsuit>' : '\U00002662',
- '\\<heartsuit>' : '\U00002661',
- '\\<spadesuit>' : '\U00002660',
- '\\<aleph>' : '\U00002135',
- '\\<emptyset>' : '\U00002205',
- '\\<nabla>' : '\U00002207',
- '\\<partial>' : '\U00002202',
- '\\<flat>' : '\U0000266d',
- '\\<natural>' : '\U0000266e',
- '\\<sharp>' : '\U0000266f',
- '\\<angle>' : '\U00002220',
- '\\<copyright>' : '\U000000a9',
- '\\<registered>' : '\U000000ae',
- '\\<hyphen>' : '\U000000ad',
- '\\<inverse>' : '\U000000af',
- '\\<onequarter>' : '\U000000bc',
- '\\<onehalf>' : '\U000000bd',
- '\\<threequarters>' : '\U000000be',
- '\\<ordfeminine>' : '\U000000aa',
- '\\<ordmasculine>' : '\U000000ba',
- '\\<section>' : '\U000000a7',
- '\\<paragraph>' : '\U000000b6',
- '\\<exclamdown>' : '\U000000a1',
- '\\<questiondown>' : '\U000000bf',
- '\\<euro>' : '\U000020ac',
- '\\<pounds>' : '\U000000a3',
- '\\<yen>' : '\U000000a5',
- '\\<cent>' : '\U000000a2',
- '\\<currency>' : '\U000000a4',
- '\\<degree>' : '\U000000b0',
- '\\<amalg>' : '\U00002a3f',
- '\\<mho>' : '\U00002127',
- '\\<lozenge>' : '\U000025ca',
- '\\<wp>' : '\U00002118',
- '\\<wrong>' : '\U00002240',
- '\\<struct>' : '\U000022c4',
- '\\<acute>' : '\U000000b4',
- '\\<index>' : '\U00000131',
- '\\<dieresis>' : '\U000000a8',
- '\\<cedilla>' : '\U000000b8',
- '\\<hungarumlaut>' : '\U000002dd',
- '\\<some>' : '\U000003f5',
- '\\<newline>' : '\U000023ce',
- '\\<open>' : '\U00002039',
- '\\<close>' : '\U0000203a',
- '\\<here>' : '\U00002302',
- '\\<^sub>' : '\U000021e9',
- '\\<^sup>' : '\U000021e7',
- '\\<^bold>' : '\U00002759',
- '\\<^bsub>' : '\U000021d8',
- '\\<^esub>' : '\U000021d9',
- '\\<^bsup>' : '\U000021d7',
- '\\<^esup>' : '\U000021d6',
- }
-
- lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- lang = get_choice_opt(options, 'lang',
- ['isabelle', 'latex'], 'isabelle')
- self.symbols = self.lang_map[lang]
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if value in self.symbols:
- yield ttype, self.symbols[value]
+ yield from _replace_special(ttype, value, regex, Comment.Special)
else:
yield ttype, value
+class SymbolFilter(Filter):
+ """Convert mathematical symbols such as \\<longrightarrow> in Isabelle
+ or \\longrightarrow in LaTeX into Unicode characters.
+
+ This is mostly useful for HTML or console output when you want to
+ approximate the source rendering you'd see in an IDE.
+
+ Options accepted:
+
+ `lang` : string
+ The symbol language. Must be one of ``'isabelle'`` or
+ ``'latex'``. The default is ``'isabelle'``.
+ """
+
+ latex_symbols = {
+ '\\alpha' : '\U000003b1',
+ '\\beta' : '\U000003b2',
+ '\\gamma' : '\U000003b3',
+ '\\delta' : '\U000003b4',
+ '\\varepsilon' : '\U000003b5',
+ '\\zeta' : '\U000003b6',
+ '\\eta' : '\U000003b7',
+ '\\vartheta' : '\U000003b8',
+ '\\iota' : '\U000003b9',
+ '\\kappa' : '\U000003ba',
+ '\\lambda' : '\U000003bb',
+ '\\mu' : '\U000003bc',
+ '\\nu' : '\U000003bd',
+ '\\xi' : '\U000003be',
+ '\\pi' : '\U000003c0',
+ '\\varrho' : '\U000003c1',
+ '\\sigma' : '\U000003c3',
+ '\\tau' : '\U000003c4',
+ '\\upsilon' : '\U000003c5',
+ '\\varphi' : '\U000003c6',
+ '\\chi' : '\U000003c7',
+ '\\psi' : '\U000003c8',
+ '\\omega' : '\U000003c9',
+ '\\Gamma' : '\U00000393',
+ '\\Delta' : '\U00000394',
+ '\\Theta' : '\U00000398',
+ '\\Lambda' : '\U0000039b',
+ '\\Xi' : '\U0000039e',
+ '\\Pi' : '\U000003a0',
+ '\\Sigma' : '\U000003a3',
+ '\\Upsilon' : '\U000003a5',
+ '\\Phi' : '\U000003a6',
+ '\\Psi' : '\U000003a8',
+ '\\Omega' : '\U000003a9',
+ '\\leftarrow' : '\U00002190',
+ '\\longleftarrow' : '\U000027f5',
+ '\\rightarrow' : '\U00002192',
+ '\\longrightarrow' : '\U000027f6',
+ '\\Leftarrow' : '\U000021d0',
+ '\\Longleftarrow' : '\U000027f8',
+ '\\Rightarrow' : '\U000021d2',
+ '\\Longrightarrow' : '\U000027f9',
+ '\\leftrightarrow' : '\U00002194',
+ '\\longleftrightarrow' : '\U000027f7',
+ '\\Leftrightarrow' : '\U000021d4',
+ '\\Longleftrightarrow' : '\U000027fa',
+ '\\mapsto' : '\U000021a6',
+ '\\longmapsto' : '\U000027fc',
+ '\\relbar' : '\U00002500',
+ '\\Relbar' : '\U00002550',
+ '\\hookleftarrow' : '\U000021a9',
+ '\\hookrightarrow' : '\U000021aa',
+ '\\leftharpoondown' : '\U000021bd',
+ '\\rightharpoondown' : '\U000021c1',
+ '\\leftharpoonup' : '\U000021bc',
+ '\\rightharpoonup' : '\U000021c0',
+ '\\rightleftharpoons' : '\U000021cc',
+ '\\leadsto' : '\U0000219d',
+ '\\downharpoonleft' : '\U000021c3',
+ '\\downharpoonright' : '\U000021c2',
+ '\\upharpoonleft' : '\U000021bf',
+ '\\upharpoonright' : '\U000021be',
+ '\\restriction' : '\U000021be',
+ '\\uparrow' : '\U00002191',
+ '\\Uparrow' : '\U000021d1',
+ '\\downarrow' : '\U00002193',
+ '\\Downarrow' : '\U000021d3',
+ '\\updownarrow' : '\U00002195',
+ '\\Updownarrow' : '\U000021d5',
+ '\\langle' : '\U000027e8',
+ '\\rangle' : '\U000027e9',
+ '\\lceil' : '\U00002308',
+ '\\rceil' : '\U00002309',
+ '\\lfloor' : '\U0000230a',
+ '\\rfloor' : '\U0000230b',
+ '\\flqq' : '\U000000ab',
+ '\\frqq' : '\U000000bb',
+ '\\bot' : '\U000022a5',
+ '\\top' : '\U000022a4',
+ '\\wedge' : '\U00002227',
+ '\\bigwedge' : '\U000022c0',
+ '\\vee' : '\U00002228',
+ '\\bigvee' : '\U000022c1',
+ '\\forall' : '\U00002200',
+ '\\exists' : '\U00002203',
+ '\\nexists' : '\U00002204',
+ '\\neg' : '\U000000ac',
+ '\\Box' : '\U000025a1',
+ '\\Diamond' : '\U000025c7',
+ '\\vdash' : '\U000022a2',
+ '\\models' : '\U000022a8',
+ '\\dashv' : '\U000022a3',
+ '\\surd' : '\U0000221a',
+ '\\le' : '\U00002264',
+ '\\ge' : '\U00002265',
+ '\\ll' : '\U0000226a',
+ '\\gg' : '\U0000226b',
+ '\\lesssim' : '\U00002272',
+ '\\gtrsim' : '\U00002273',
+ '\\lessapprox' : '\U00002a85',
+ '\\gtrapprox' : '\U00002a86',
+ '\\in' : '\U00002208',
+ '\\notin' : '\U00002209',
+ '\\subset' : '\U00002282',
+ '\\supset' : '\U00002283',
+ '\\subseteq' : '\U00002286',
+ '\\supseteq' : '\U00002287',
+ '\\sqsubset' : '\U0000228f',
+ '\\sqsupset' : '\U00002290',
+ '\\sqsubseteq' : '\U00002291',
+ '\\sqsupseteq' : '\U00002292',
+ '\\cap' : '\U00002229',
+ '\\bigcap' : '\U000022c2',
+ '\\cup' : '\U0000222a',
+ '\\bigcup' : '\U000022c3',
+ '\\sqcup' : '\U00002294',
+ '\\bigsqcup' : '\U00002a06',
+ '\\sqcap' : '\U00002293',
+ '\\Bigsqcap' : '\U00002a05',
+ '\\setminus' : '\U00002216',
+ '\\propto' : '\U0000221d',
+ '\\uplus' : '\U0000228e',
+ '\\bigplus' : '\U00002a04',
+ '\\sim' : '\U0000223c',
+ '\\doteq' : '\U00002250',
+ '\\simeq' : '\U00002243',
+ '\\approx' : '\U00002248',
+ '\\asymp' : '\U0000224d',
+ '\\cong' : '\U00002245',
+ '\\equiv' : '\U00002261',
+ '\\Join' : '\U000022c8',
+ '\\bowtie' : '\U00002a1d',
+ '\\prec' : '\U0000227a',
+ '\\succ' : '\U0000227b',
+ '\\preceq' : '\U0000227c',
+ '\\succeq' : '\U0000227d',
+ '\\parallel' : '\U00002225',
+ '\\mid' : '\U000000a6',
+ '\\pm' : '\U000000b1',
+ '\\mp' : '\U00002213',
+ '\\times' : '\U000000d7',
+ '\\div' : '\U000000f7',
+ '\\cdot' : '\U000022c5',
+ '\\star' : '\U000022c6',
+ '\\circ' : '\U00002218',
+ '\\dagger' : '\U00002020',
+ '\\ddagger' : '\U00002021',
+ '\\lhd' : '\U000022b2',
+ '\\rhd' : '\U000022b3',
+ '\\unlhd' : '\U000022b4',
+ '\\unrhd' : '\U000022b5',
+ '\\triangleleft' : '\U000025c3',
+ '\\triangleright' : '\U000025b9',
+ '\\triangle' : '\U000025b3',
+ '\\triangleq' : '\U0000225c',
+ '\\oplus' : '\U00002295',
+ '\\bigoplus' : '\U00002a01',
+ '\\otimes' : '\U00002297',
+ '\\bigotimes' : '\U00002a02',
+ '\\odot' : '\U00002299',
+ '\\bigodot' : '\U00002a00',
+ '\\ominus' : '\U00002296',
+ '\\oslash' : '\U00002298',
+ '\\dots' : '\U00002026',
+ '\\cdots' : '\U000022ef',
+ '\\sum' : '\U00002211',
+ '\\prod' : '\U0000220f',
+ '\\coprod' : '\U00002210',
+ '\\infty' : '\U0000221e',
+ '\\int' : '\U0000222b',
+ '\\oint' : '\U0000222e',
+ '\\clubsuit' : '\U00002663',
+ '\\diamondsuit' : '\U00002662',
+ '\\heartsuit' : '\U00002661',
+ '\\spadesuit' : '\U00002660',
+ '\\aleph' : '\U00002135',
+ '\\emptyset' : '\U00002205',
+ '\\nabla' : '\U00002207',
+ '\\partial' : '\U00002202',
+ '\\flat' : '\U0000266d',
+ '\\natural' : '\U0000266e',
+ '\\sharp' : '\U0000266f',
+ '\\angle' : '\U00002220',
+ '\\copyright' : '\U000000a9',
+ '\\textregistered' : '\U000000ae',
+ '\\textonequarter' : '\U000000bc',
+ '\\textonehalf' : '\U000000bd',
+ '\\textthreequarters' : '\U000000be',
+ '\\textordfeminine' : '\U000000aa',
+ '\\textordmasculine' : '\U000000ba',
+ '\\euro' : '\U000020ac',
+ '\\pounds' : '\U000000a3',
+ '\\yen' : '\U000000a5',
+ '\\textcent' : '\U000000a2',
+ '\\textcurrency' : '\U000000a4',
+ '\\textdegree' : '\U000000b0',
+ }
+
+ isabelle_symbols = {
+ '\\<zero>' : '\U0001d7ec',
+ '\\<one>' : '\U0001d7ed',
+ '\\<two>' : '\U0001d7ee',
+ '\\<three>' : '\U0001d7ef',
+ '\\<four>' : '\U0001d7f0',
+ '\\<five>' : '\U0001d7f1',
+ '\\<six>' : '\U0001d7f2',
+ '\\<seven>' : '\U0001d7f3',
+ '\\<eight>' : '\U0001d7f4',
+ '\\<nine>' : '\U0001d7f5',
+ '\\<A>' : '\U0001d49c',
+ '\\<B>' : '\U0000212c',
+ '\\<C>' : '\U0001d49e',
+ '\\<D>' : '\U0001d49f',
+ '\\<E>' : '\U00002130',
+ '\\<F>' : '\U00002131',
+ '\\<G>' : '\U0001d4a2',
+ '\\<H>' : '\U0000210b',
+ '\\<I>' : '\U00002110',
+ '\\<J>' : '\U0001d4a5',
+ '\\<K>' : '\U0001d4a6',
+ '\\<L>' : '\U00002112',
+ '\\<M>' : '\U00002133',
+ '\\<N>' : '\U0001d4a9',
+ '\\<O>' : '\U0001d4aa',
+ '\\<P>' : '\U0001d4ab',
+ '\\<Q>' : '\U0001d4ac',
+ '\\<R>' : '\U0000211b',
+ '\\<S>' : '\U0001d4ae',
+ '\\<T>' : '\U0001d4af',
+ '\\<U>' : '\U0001d4b0',
+ '\\<V>' : '\U0001d4b1',
+ '\\<W>' : '\U0001d4b2',
+ '\\<X>' : '\U0001d4b3',
+ '\\<Y>' : '\U0001d4b4',
+ '\\<Z>' : '\U0001d4b5',
+ '\\<a>' : '\U0001d5ba',
+ '\\<b>' : '\U0001d5bb',
+ '\\<c>' : '\U0001d5bc',
+ '\\<d>' : '\U0001d5bd',
+ '\\<e>' : '\U0001d5be',
+ '\\<f>' : '\U0001d5bf',
+ '\\<g>' : '\U0001d5c0',
+ '\\<h>' : '\U0001d5c1',
+ '\\<i>' : '\U0001d5c2',
+ '\\<j>' : '\U0001d5c3',
+ '\\<k>' : '\U0001d5c4',
+ '\\<l>' : '\U0001d5c5',
+ '\\<m>' : '\U0001d5c6',
+ '\\<n>' : '\U0001d5c7',
+ '\\<o>' : '\U0001d5c8',
+ '\\<p>' : '\U0001d5c9',
+ '\\<q>' : '\U0001d5ca',
+ '\\<r>' : '\U0001d5cb',
+ '\\<s>' : '\U0001d5cc',
+ '\\<t>' : '\U0001d5cd',
+ '\\<u>' : '\U0001d5ce',
+ '\\<v>' : '\U0001d5cf',
+ '\\<w>' : '\U0001d5d0',
+ '\\<x>' : '\U0001d5d1',
+ '\\<y>' : '\U0001d5d2',
+ '\\<z>' : '\U0001d5d3',
+ '\\<AA>' : '\U0001d504',
+ '\\<BB>' : '\U0001d505',
+ '\\<CC>' : '\U0000212d',
+ '\\<DD>' : '\U0001d507',
+ '\\<EE>' : '\U0001d508',
+ '\\<FF>' : '\U0001d509',
+ '\\<GG>' : '\U0001d50a',
+ '\\<HH>' : '\U0000210c',
+ '\\<II>' : '\U00002111',
+ '\\<JJ>' : '\U0001d50d',
+ '\\<KK>' : '\U0001d50e',
+ '\\<LL>' : '\U0001d50f',
+ '\\<MM>' : '\U0001d510',
+ '\\<NN>' : '\U0001d511',
+ '\\<OO>' : '\U0001d512',
+ '\\<PP>' : '\U0001d513',
+ '\\<QQ>' : '\U0001d514',
+ '\\<RR>' : '\U0000211c',
+ '\\<SS>' : '\U0001d516',
+ '\\<TT>' : '\U0001d517',
+ '\\<UU>' : '\U0001d518',
+ '\\<VV>' : '\U0001d519',
+ '\\<WW>' : '\U0001d51a',
+ '\\<XX>' : '\U0001d51b',
+ '\\<YY>' : '\U0001d51c',
+ '\\<ZZ>' : '\U00002128',
+ '\\<aa>' : '\U0001d51e',
+ '\\<bb>' : '\U0001d51f',
+ '\\<cc>' : '\U0001d520',
+ '\\<dd>' : '\U0001d521',
+ '\\<ee>' : '\U0001d522',
+ '\\<ff>' : '\U0001d523',
+ '\\<gg>' : '\U0001d524',
+ '\\<hh>' : '\U0001d525',
+ '\\<ii>' : '\U0001d526',
+ '\\<jj>' : '\U0001d527',
+ '\\<kk>' : '\U0001d528',
+ '\\<ll>' : '\U0001d529',
+ '\\<mm>' : '\U0001d52a',
+ '\\<nn>' : '\U0001d52b',
+ '\\<oo>' : '\U0001d52c',
+ '\\<pp>' : '\U0001d52d',
+ '\\<qq>' : '\U0001d52e',
+ '\\<rr>' : '\U0001d52f',
+ '\\<ss>' : '\U0001d530',
+ '\\<tt>' : '\U0001d531',
+ '\\<uu>' : '\U0001d532',
+ '\\<vv>' : '\U0001d533',
+ '\\<ww>' : '\U0001d534',
+ '\\<xx>' : '\U0001d535',
+ '\\<yy>' : '\U0001d536',
+ '\\<zz>' : '\U0001d537',
+ '\\<alpha>' : '\U000003b1',
+ '\\<beta>' : '\U000003b2',
+ '\\<gamma>' : '\U000003b3',
+ '\\<delta>' : '\U000003b4',
+ '\\<epsilon>' : '\U000003b5',
+ '\\<zeta>' : '\U000003b6',
+ '\\<eta>' : '\U000003b7',
+ '\\<theta>' : '\U000003b8',
+ '\\<iota>' : '\U000003b9',
+ '\\<kappa>' : '\U000003ba',
+ '\\<lambda>' : '\U000003bb',
+ '\\<mu>' : '\U000003bc',
+ '\\<nu>' : '\U000003bd',
+ '\\<xi>' : '\U000003be',
+ '\\<pi>' : '\U000003c0',
+ '\\<rho>' : '\U000003c1',
+ '\\<sigma>' : '\U000003c3',
+ '\\<tau>' : '\U000003c4',
+ '\\<upsilon>' : '\U000003c5',
+ '\\<phi>' : '\U000003c6',
+ '\\<chi>' : '\U000003c7',
+ '\\<psi>' : '\U000003c8',
+ '\\<omega>' : '\U000003c9',
+ '\\<Gamma>' : '\U00000393',
+ '\\<Delta>' : '\U00000394',
+ '\\<Theta>' : '\U00000398',
+ '\\<Lambda>' : '\U0000039b',
+ '\\<Xi>' : '\U0000039e',
+ '\\<Pi>' : '\U000003a0',
+ '\\<Sigma>' : '\U000003a3',
+ '\\<Upsilon>' : '\U000003a5',
+ '\\<Phi>' : '\U000003a6',
+ '\\<Psi>' : '\U000003a8',
+ '\\<Omega>' : '\U000003a9',
+ '\\<bool>' : '\U0001d539',
+ '\\<complex>' : '\U00002102',
+ '\\<nat>' : '\U00002115',
+ '\\<rat>' : '\U0000211a',
+ '\\<real>' : '\U0000211d',
+ '\\<int>' : '\U00002124',
+ '\\<leftarrow>' : '\U00002190',
+ '\\<longleftarrow>' : '\U000027f5',
+ '\\<rightarrow>' : '\U00002192',
+ '\\<longrightarrow>' : '\U000027f6',
+ '\\<Leftarrow>' : '\U000021d0',
+ '\\<Longleftarrow>' : '\U000027f8',
+ '\\<Rightarrow>' : '\U000021d2',
+ '\\<Longrightarrow>' : '\U000027f9',
+ '\\<leftrightarrow>' : '\U00002194',
+ '\\<longleftrightarrow>' : '\U000027f7',
+ '\\<Leftrightarrow>' : '\U000021d4',
+ '\\<Longleftrightarrow>' : '\U000027fa',
+ '\\<mapsto>' : '\U000021a6',
+ '\\<longmapsto>' : '\U000027fc',
+ '\\<midarrow>' : '\U00002500',
+ '\\<Midarrow>' : '\U00002550',
+ '\\<hookleftarrow>' : '\U000021a9',
+ '\\<hookrightarrow>' : '\U000021aa',
+ '\\<leftharpoondown>' : '\U000021bd',
+ '\\<rightharpoondown>' : '\U000021c1',
+ '\\<leftharpoonup>' : '\U000021bc',
+ '\\<rightharpoonup>' : '\U000021c0',
+ '\\<rightleftharpoons>' : '\U000021cc',
+ '\\<leadsto>' : '\U0000219d',
+ '\\<downharpoonleft>' : '\U000021c3',
+ '\\<downharpoonright>' : '\U000021c2',
+ '\\<upharpoonleft>' : '\U000021bf',
+ '\\<upharpoonright>' : '\U000021be',
+ '\\<restriction>' : '\U000021be',
+ '\\<Colon>' : '\U00002237',
+ '\\<up>' : '\U00002191',
+ '\\<Up>' : '\U000021d1',
+ '\\<down>' : '\U00002193',
+ '\\<Down>' : '\U000021d3',
+ '\\<updown>' : '\U00002195',
+ '\\<Updown>' : '\U000021d5',
+ '\\<langle>' : '\U000027e8',
+ '\\<rangle>' : '\U000027e9',
+ '\\<lceil>' : '\U00002308',
+ '\\<rceil>' : '\U00002309',
+ '\\<lfloor>' : '\U0000230a',
+ '\\<rfloor>' : '\U0000230b',
+ '\\<lparr>' : '\U00002987',
+ '\\<rparr>' : '\U00002988',
+ '\\<lbrakk>' : '\U000027e6',
+ '\\<rbrakk>' : '\U000027e7',
+ '\\<lbrace>' : '\U00002983',
+ '\\<rbrace>' : '\U00002984',
+ '\\<guillemotleft>' : '\U000000ab',
+ '\\<guillemotright>' : '\U000000bb',
+ '\\<bottom>' : '\U000022a5',
+ '\\<top>' : '\U000022a4',
+ '\\<and>' : '\U00002227',
+ '\\<And>' : '\U000022c0',
+ '\\<or>' : '\U00002228',
+ '\\<Or>' : '\U000022c1',
+ '\\<forall>' : '\U00002200',
+ '\\<exists>' : '\U00002203',
+ '\\<nexists>' : '\U00002204',
+ '\\<not>' : '\U000000ac',
+ '\\<box>' : '\U000025a1',
+ '\\<diamond>' : '\U000025c7',
+ '\\<turnstile>' : '\U000022a2',
+ '\\<Turnstile>' : '\U000022a8',
+ '\\<tturnstile>' : '\U000022a9',
+ '\\<TTurnstile>' : '\U000022ab',
+ '\\<stileturn>' : '\U000022a3',
+ '\\<surd>' : '\U0000221a',
+ '\\<le>' : '\U00002264',
+ '\\<ge>' : '\U00002265',
+ '\\<lless>' : '\U0000226a',
+ '\\<ggreater>' : '\U0000226b',
+ '\\<lesssim>' : '\U00002272',
+ '\\<greatersim>' : '\U00002273',
+ '\\<lessapprox>' : '\U00002a85',
+ '\\<greaterapprox>' : '\U00002a86',
+ '\\<in>' : '\U00002208',
+ '\\<notin>' : '\U00002209',
+ '\\<subset>' : '\U00002282',
+ '\\<supset>' : '\U00002283',
+ '\\<subseteq>' : '\U00002286',
+ '\\<supseteq>' : '\U00002287',
+ '\\<sqsubset>' : '\U0000228f',
+ '\\<sqsupset>' : '\U00002290',
+ '\\<sqsubseteq>' : '\U00002291',
+ '\\<sqsupseteq>' : '\U00002292',
+ '\\<inter>' : '\U00002229',
+ '\\<Inter>' : '\U000022c2',
+ '\\<union>' : '\U0000222a',
+ '\\<Union>' : '\U000022c3',
+ '\\<squnion>' : '\U00002294',
+ '\\<Squnion>' : '\U00002a06',
+ '\\<sqinter>' : '\U00002293',
+ '\\<Sqinter>' : '\U00002a05',
+ '\\<setminus>' : '\U00002216',
+ '\\<propto>' : '\U0000221d',
+ '\\<uplus>' : '\U0000228e',
+ '\\<Uplus>' : '\U00002a04',
+ '\\<noteq>' : '\U00002260',
+ '\\<sim>' : '\U0000223c',
+ '\\<doteq>' : '\U00002250',
+ '\\<simeq>' : '\U00002243',
+ '\\<approx>' : '\U00002248',
+ '\\<asymp>' : '\U0000224d',
+ '\\<cong>' : '\U00002245',
+ '\\<smile>' : '\U00002323',
+ '\\<equiv>' : '\U00002261',
+ '\\<frown>' : '\U00002322',
+ '\\<Join>' : '\U000022c8',
+ '\\<bowtie>' : '\U00002a1d',
+ '\\<prec>' : '\U0000227a',
+ '\\<succ>' : '\U0000227b',
+ '\\<preceq>' : '\U0000227c',
+ '\\<succeq>' : '\U0000227d',
+ '\\<parallel>' : '\U00002225',
+ '\\<bar>' : '\U000000a6',
+ '\\<plusminus>' : '\U000000b1',
+ '\\<minusplus>' : '\U00002213',
+ '\\<times>' : '\U000000d7',
+ '\\<div>' : '\U000000f7',
+ '\\<cdot>' : '\U000022c5',
+ '\\<star>' : '\U000022c6',
+ '\\<bullet>' : '\U00002219',
+ '\\<circ>' : '\U00002218',
+ '\\<dagger>' : '\U00002020',
+ '\\<ddagger>' : '\U00002021',
+ '\\<lhd>' : '\U000022b2',
+ '\\<rhd>' : '\U000022b3',
+ '\\<unlhd>' : '\U000022b4',
+ '\\<unrhd>' : '\U000022b5',
+ '\\<triangleleft>' : '\U000025c3',
+ '\\<triangleright>' : '\U000025b9',
+ '\\<triangle>' : '\U000025b3',
+ '\\<triangleq>' : '\U0000225c',
+ '\\<oplus>' : '\U00002295',
+ '\\<Oplus>' : '\U00002a01',
+ '\\<otimes>' : '\U00002297',
+ '\\<Otimes>' : '\U00002a02',
+ '\\<odot>' : '\U00002299',
+ '\\<Odot>' : '\U00002a00',
+ '\\<ominus>' : '\U00002296',
+ '\\<oslash>' : '\U00002298',
+ '\\<dots>' : '\U00002026',
+ '\\<cdots>' : '\U000022ef',
+ '\\<Sum>' : '\U00002211',
+ '\\<Prod>' : '\U0000220f',
+ '\\<Coprod>' : '\U00002210',
+ '\\<infinity>' : '\U0000221e',
+ '\\<integral>' : '\U0000222b',
+ '\\<ointegral>' : '\U0000222e',
+ '\\<clubsuit>' : '\U00002663',
+ '\\<diamondsuit>' : '\U00002662',
+ '\\<heartsuit>' : '\U00002661',
+ '\\<spadesuit>' : '\U00002660',
+ '\\<aleph>' : '\U00002135',
+ '\\<emptyset>' : '\U00002205',
+ '\\<nabla>' : '\U00002207',
+ '\\<partial>' : '\U00002202',
+ '\\<flat>' : '\U0000266d',
+ '\\<natural>' : '\U0000266e',
+ '\\<sharp>' : '\U0000266f',
+ '\\<angle>' : '\U00002220',
+ '\\<copyright>' : '\U000000a9',
+ '\\<registered>' : '\U000000ae',
+ '\\<hyphen>' : '\U000000ad',
+ '\\<inverse>' : '\U000000af',
+ '\\<onequarter>' : '\U000000bc',
+ '\\<onehalf>' : '\U000000bd',
+ '\\<threequarters>' : '\U000000be',
+ '\\<ordfeminine>' : '\U000000aa',
+ '\\<ordmasculine>' : '\U000000ba',
+ '\\<section>' : '\U000000a7',
+ '\\<paragraph>' : '\U000000b6',
+ '\\<exclamdown>' : '\U000000a1',
+ '\\<questiondown>' : '\U000000bf',
+ '\\<euro>' : '\U000020ac',
+ '\\<pounds>' : '\U000000a3',
+ '\\<yen>' : '\U000000a5',
+ '\\<cent>' : '\U000000a2',
+ '\\<currency>' : '\U000000a4',
+ '\\<degree>' : '\U000000b0',
+ '\\<amalg>' : '\U00002a3f',
+ '\\<mho>' : '\U00002127',
+ '\\<lozenge>' : '\U000025ca',
+ '\\<wp>' : '\U00002118',
+ '\\<wrong>' : '\U00002240',
+ '\\<struct>' : '\U000022c4',
+ '\\<acute>' : '\U000000b4',
+ '\\<index>' : '\U00000131',
+ '\\<dieresis>' : '\U000000a8',
+ '\\<cedilla>' : '\U000000b8',
+ '\\<hungarumlaut>' : '\U000002dd',
+ '\\<some>' : '\U000003f5',
+ '\\<newline>' : '\U000023ce',
+ '\\<open>' : '\U00002039',
+ '\\<close>' : '\U0000203a',
+ '\\<here>' : '\U00002302',
+ '\\<^sub>' : '\U000021e9',
+ '\\<^sup>' : '\U000021e7',
+ '\\<^bold>' : '\U00002759',
+ '\\<^bsub>' : '\U000021d8',
+ '\\<^esub>' : '\U000021d9',
+ '\\<^bsup>' : '\U000021d7',
+ '\\<^esup>' : '\U000021d6',
+ }
+
+ lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ lang = get_choice_opt(options, 'lang',
+ ['isabelle', 'latex'], 'isabelle')
+ self.symbols = self.lang_map[lang]
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if value in self.symbols:
+ yield ttype, self.symbols[value]
+ else:
+ yield ttype, value
+
+
class KeywordCaseFilter(Filter):
"""Convert keywords to lowercase or uppercase or capitalize them, which
means first letter uppercase, rest lowercase.
@@ -699,7 +699,7 @@ class KeywordCaseFilter(Filter):
Filter.__init__(self, **options)
case = get_choice_opt(options, 'case',
['lower', 'upper', 'capitalize'], 'lower')
- self.convert = getattr(str, case)
+ self.convert = getattr(str, case)
def filter(self, lexer, stream):
for ttype, value in stream:
@@ -815,11 +815,11 @@ class VisibleWhitespaceFilter(Filter):
def __init__(self, **options):
Filter.__init__(self, **options)
- for name, default in [('spaces', '·'),
- ('tabs', '»'),
- ('newlines', '¶')]:
+ for name, default in [('spaces', '·'),
+ ('tabs', '»'),
+ ('newlines', '¶')]:
opt = options.get(name, False)
- if isinstance(opt, str) and len(opt) == 1:
+ if isinstance(opt, str) and len(opt) == 1:
setattr(self, name, opt)
else:
setattr(self, name, (opt and default or ''))
@@ -832,11 +832,11 @@ class VisibleWhitespaceFilter(Filter):
def filter(self, lexer, stream):
if self.wstt:
- spaces = self.spaces or ' '
- tabs = self.tabs or '\t'
- newlines = self.newlines or '\n'
+ spaces = self.spaces or ' '
+ tabs = self.tabs or '\t'
+ newlines = self.newlines or '\n'
regex = re.compile(r'\s')
-
+
def replacefunc(wschar):
if wschar == ' ':
return spaces
@@ -847,8 +847,8 @@ class VisibleWhitespaceFilter(Filter):
return wschar
for ttype, value in stream:
- yield from _replace_special(ttype, value, regex, Whitespace,
- replacefunc)
+ yield from _replace_special(ttype, value, regex, Whitespace,
+ replacefunc)
else:
spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
# simpler processing
@@ -884,18 +884,18 @@ class GobbleFilter(Filter):
if left < len(value):
return value[left:], 0
else:
- return '', left - len(value)
+ return '', left - len(value)
def filter(self, lexer, stream):
n = self.n
- left = n # How many characters left to gobble.
+ left = n # How many characters left to gobble.
for ttype, value in stream:
# Remove ``left`` tokens from first line, ``n`` from all others.
parts = value.split('\n')
(parts[0], left) = self.gobble(parts[0], left)
for i in range(1, len(parts)):
(parts[i], left) = self.gobble(parts[i], n)
- value = '\n'.join(parts)
+ value = '\n'.join(parts)
if value != '':
yield ttype, value
@@ -933,5 +933,5 @@ FILTERS = {
'whitespace': VisibleWhitespaceFilter,
'gobble': GobbleFilter,
'tokenmerge': TokenMergeFilter,
- 'symbols': SymbolFilter,
+ 'symbols': SymbolFilter,
}
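
(Illustrative aside, not part of the diff above.) The FILTERS registry above maps short names such as 'symbols' and 'whitespace' to the filter classes in this module, so they can be attached to a lexer by name together with their options. A hedged usage sketch, assuming the Isabelle lexer is available under the alias 'isabelle':

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('isabelle')
    lexer.add_filter('symbols', lang='isabelle')  # SymbolFilter: \<longrightarrow> etc. become Unicode
    lexer.add_filter('whitespace', spaces=True)   # VisibleWhitespaceFilter: render spaces as middle dots
    print(highlight(r'lemma "A \<longrightarrow> A"', lexer, TerminalFormatter()))
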
diff --git a/contrib/python/Pygments/py3/pygments/formatter.py b/contrib/python/Pygments/py3/pygments/formatter.py
index c3fe68d3ef..9de8789d46 100644
--- a/contrib/python/Pygments/py3/pygments/formatter.py
+++ b/contrib/python/Pygments/py3/pygments/formatter.py
@@ -4,25 +4,25 @@
Base formatter class.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import codecs
-from pygments.util import get_bool_opt
+from pygments.util import get_bool_opt
from pygments.styles import get_style_by_name
__all__ = ['Formatter']
def _lookup_style(style):
- if isinstance(style, str):
+ if isinstance(style, str):
return get_style_by_name(style)
return style
-class Formatter:
+class Formatter:
"""
Converts a token stream to text.
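
(Illustrative aside, not part of the diff above.) The Formatter base class shown here converts a token stream to text; subclasses override format(tokensource, outfile). A minimal hypothetical subclass:

    from pygments import highlight
    from pygments.formatter import Formatter
    from pygments.lexers import PythonLexer

    class TokenDumpFormatter(Formatter):
        """Hypothetical formatter: one 'token-type<TAB>repr(text)' line per token."""
        name = 'Token dump (example)'
        aliases = ['tokendump-example']
        filenames = []

        def format(self, tokensource, outfile):
            for ttype, value in tokensource:
                outfile.write('%s\t%r\n' % (ttype, value))

    print(highlight('x = 1\n', PythonLexer(), TokenDumpFormatter()))
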
diff --git a/contrib/python/Pygments/py3/pygments/formatters/__init__.py b/contrib/python/Pygments/py3/pygments/formatters/__init__.py
index 66c9e9d404..9f73505773 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/__init__.py
@@ -4,7 +4,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,7 +16,7 @@ from os.path import basename
from pygments.formatters._mapping import FORMATTERS
from pygments.plugin import find_plugin_formatters
-from pygments.util import ClassNotFound
+from pygments.util import ClassNotFound
__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)
@@ -44,7 +44,7 @@ def _load_formatters(module_name):
def get_all_formatters():
"""Return a generator for all formatter classes."""
# NB: this returns formatter classes, not info like get_all_lexers().
- for info in FORMATTERS.values():
+ for info in FORMATTERS.values():
if info[1] not in _formatter_cache:
_load_formatters(info[0])
yield _formatter_cache[info[1]]
@@ -57,7 +57,7 @@ def find_formatter_class(alias):
Returns None if not found.
"""
- for module_name, name, aliases, _, _ in FORMATTERS.values():
+ for module_name, name, aliases, _, _ in FORMATTERS.values():
if alias in aliases:
if name not in _formatter_cache:
_load_formatters(module_name)
@@ -106,9 +106,9 @@ def load_formatter_from_file(filename, formattername="CustomFormatter",
formatter_class = custom_namespace[formattername]
# And finally instantiate it with the options
return formatter_class(**options)
- except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
- except ClassNotFound:
+ except OSError as err:
+ raise ClassNotFound('cannot read %s: %s' % (filename, err))
+ except ClassNotFound:
raise
except Exception as err:
raise ClassNotFound('error when loading custom formatter: %s' % err)
@@ -120,7 +120,7 @@ def get_formatter_for_filename(fn, **options):
Raises ClassNotFound if not found.
"""
fn = basename(fn)
- for modname, name, _, filenames, _ in FORMATTERS.values():
+ for modname, name, _, filenames, _ in FORMATTERS.values():
for filename in filenames:
if _fn_matches(fn, filename):
if name not in _formatter_cache:
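
(Illustrative aside, not part of the diff above.) The two lookup helpers touched in this file resolve a formatter either from an output filename or from an external module; a short sketch, with both file paths as assumptions:

    from pygments.formatters import get_formatter_for_filename, load_formatter_from_file

    # Resolves to HtmlFormatter via the *.html filename pattern; extra options are passed through.
    html_fmt = get_formatter_for_filename('report.html', linenos=True)

    # Assumes ./customformatter.py defines a class named CustomFormatter;
    # as with custom lexers, the file is imported and executed, so it must be trusted.
    custom_fmt = load_formatter_from_file('./customformatter.py',
                                          formattername='CustomFormatter')
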
diff --git a/contrib/python/Pygments/py3/pygments/formatters/_mapping.py b/contrib/python/Pygments/py3/pygments/formatters/_mapping.py
index 8b5e478e39..18ec4e67e5 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/_mapping.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/_mapping.py
@@ -8,7 +8,7 @@
Do not alter the FORMATTERS dictionary by hand.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,7 +23,7 @@ FORMATTERS = {
'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
- 'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
+ 'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
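
(Illustrative aside, not part of the diff above.) Each FORMATTERS value is a tuple of (module name, pretty name, aliases, filename patterns, docstring), the same layout unpacked by find_formatter_class() in the previous file. A sketch that lists the registered formatters from this internal mapping:

    from pygments.formatters._mapping import FORMATTERS  # internal table shown in the hunk above

    for module_name, name, aliases, patterns, doc in FORMATTERS.values():
        print('%-20s %-30s %s' % (name, ', '.join(aliases), ', '.join(patterns)))
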
diff --git a/contrib/python/Pygments/py3/pygments/formatters/bbcode.py b/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
index 586a8925c6..035752030b 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
@@ -4,7 +4,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/formatters/html.py b/contrib/python/Pygments/py3/pygments/formatters/html.py
index f3a77a2ddf..4096506ad5 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/html.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/html.py
@@ -4,19 +4,19 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import functools
+import functools
import os
import sys
import os.path
-from io import StringIO
+from io import StringIO
from pygments.formatter import Formatter
from pygments.token import Token, Text, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt
try:
import ctags
@@ -27,11 +27,11 @@ __all__ = ['HtmlFormatter']
_escape_html_table = {
- ord('&'): '&amp;',
- ord('<'): '&lt;',
- ord('>'): '&gt;',
- ord('"'): '&quot;',
- ord("'"): '&#39;',
+ ord('&'): '&amp;',
+ ord('<'): '&lt;',
+ ord('>'): '&gt;',
+ ord('"'): '&quot;',
+ ord("'"): '&#39;',
}
@@ -39,14 +39,14 @@ def escape_html(text, table=_escape_html_table):
"""Escape &, <, > as well as single and double quotes for HTML."""
return text.translate(table)
-
+
def webify(color):
if color.startswith('calc') or color.startswith('var'):
return color
else:
return '#' + color
-
+
def _get_ttype_class(ttype):
fname = STANDARD_TYPES.get(ttype)
if fname:
@@ -61,8 +61,8 @@ def _get_ttype_class(ttype):
CSSFILE_TEMPLATE = '''\
/*
-generated by Pygments <https://pygments.org/>
-Copyright 2006-2021 by the Pygments team.
+generated by Pygments <https://pygments.org/>
+Copyright 2006-2021 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
*/
%(styledefs)s
@@ -72,8 +72,8 @@ DOC_HEADER = '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<!--
-generated by Pygments <https://pygments.org/>
-Copyright 2006-2021 by the Pygments team.
+generated by Pygments <https://pygments.org/>
+Copyright 2006-2021 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
-->
<html>
@@ -205,10 +205,10 @@ class HtmlFormatter(Formatter):
`cssfile` exists.
`noclasses`
- If set to true, token ``<span>`` tags (as well as line number elements)
- will not use CSS classes, but inline styles. This is not recommended
- for larger pieces of code since it increases output size by quite a bit
- (default: ``False``).
+ If set to true, token ``<span>`` tags (as well as line number elements)
+ will not use CSS classes, but inline styles. This is not recommended
+ for larger pieces of code since it increases output size by quite a bit
+ (default: ``False``).
`classprefix`
Since the token types use relatively short class names, they may clash
@@ -272,9 +272,9 @@ class HtmlFormatter(Formatter):
125%``).
`hl_lines`
- Specify a list of lines to be highlighted. The line numbers are always
- relative to the input (i.e. the first line is line 1) and are
- independent of `linenostart`.
+ Specify a list of lines to be highlighted. The line numbers are always
+ relative to the input (i.e. the first line is line 1) and are
+ independent of `linenostart`.
.. versionadded:: 0.11
@@ -305,7 +305,7 @@ class HtmlFormatter(Formatter):
`lineanchors`
If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in an anchor tag with an ``id`` (and `name`) of ``foo-linenumber``.
+ output line in an anchor tag with an ``id`` (and `name`) of ``foo-linenumber``.
This allows easy linking to certain lines.
.. versionadded:: 0.9
@@ -337,9 +337,9 @@ class HtmlFormatter(Formatter):
`filename`
A string used to generate a filename when rendering ``<pre>`` blocks,
- for example if displaying source code. If `linenos` is set to
- ``'table'`` then the filename will be rendered in an initial row
- containing a single `<th>` which spans both columns.
+ for example if displaying source code. If `linenos` is set to
+ ``'table'`` then the filename will be rendered in an initial row
+ containing a single `<th>` which spans both columns.
.. versionadded:: 2.1
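
(Illustrative aside, not part of the diff above.) The HtmlFormatter options documented in these hunks (hl_lines, lineanchors, filename, table-style line numbers) combine as in the following sketch; the sample code and anchor prefix are assumptions:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import PythonLexer

    formatter = HtmlFormatter(
        linenos='table',      # table layout, so `filename` is rendered in its own header row
        hl_lines=[2, 3],      # counted from 1, independent of linenostart
        lineanchors='line',   # each output line is wrapped in an anchor with id line-1, line-2, ...
        filename='example.py',
    )
    print(highlight('x = 1\ny = 2\nprint(x + y)\n', PythonLexer(), formatter))
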
@@ -424,7 +424,7 @@ class HtmlFormatter(Formatter):
self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
self.filename = self._decodeifneeded(options.get('filename', ''))
self.wrapcode = get_bool_opt(options, 'wrapcode', False)
- self.span_element_openers = {}
+ self.span_element_openers = {}
self.debug_token_types = get_bool_opt(options, 'debug_token_types', False)
if self.tagsfile:
@@ -445,10 +445,10 @@ class HtmlFormatter(Formatter):
self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.lineseparator = options.get('lineseparator', '\n')
+ self.lineseparator = options.get('lineseparator', '\n')
self.lineanchors = options.get('lineanchors', '')
self.linespans = options.get('linespans', '')
- self.anchorlinenos = get_bool_opt(options, 'anchorlinenos', False)
+ self.anchorlinenos = get_bool_opt(options, 'anchorlinenos', False)
self.hl_lines = set()
for lineno in get_list_opt(options, 'hl_lines', []):
try:
@@ -467,21 +467,21 @@ class HtmlFormatter(Formatter):
return ''
def _get_css_classes(self, ttype):
- """Return the CSS classes of this token type prefixed with the classprefix option."""
+ """Return the CSS classes of this token type prefixed with the classprefix option."""
cls = self._get_css_class(ttype)
while ttype not in STANDARD_TYPES:
ttype = ttype.parent
cls = self._get_css_class(ttype) + ' ' + cls
- return cls or ''
-
- def _get_css_inline_styles(self, ttype):
- """Return the inline CSS styles for this token type."""
- cclass = self.ttype2class.get(ttype)
- while cclass is None:
- ttype = ttype.parent
- cclass = self.ttype2class.get(ttype)
- return cclass or ''
-
+ return cls or ''
+
+ def _get_css_inline_styles(self, ttype):
+ """Return the inline CSS styles for this token type."""
+ cclass = self.ttype2class.get(ttype)
+ while cclass is None:
+ ttype = ttype.parent
+ cclass = self.ttype2class.get(ttype)
+ return cclass or ''
+
def _create_stylesheet(self):
t2c = self.ttype2class = {Token: ''}
c2s = self.class2style = {}
@@ -512,69 +512,69 @@ class HtmlFormatter(Formatter):
highlighting style. ``arg`` can be a string or list of selectors to
insert before the token type classes.
"""
- style_lines = []
-
- style_lines.extend(self.get_linenos_style_defs())
- style_lines.extend(self.get_background_style_defs(arg))
- style_lines.extend(self.get_token_style_defs(arg))
-
- return '\n'.join(style_lines)
-
- def get_token_style_defs(self, arg=None):
- prefix = self.get_css_prefix(arg)
-
- styles = [
- (level, ttype, cls, style)
- for cls, (style, ttype, level) in self.class2style.items()
- if cls and style
- ]
- styles.sort()
-
- lines = [
- '%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
- for (level, ttype, cls, style) in styles
- ]
-
- return lines
-
- def get_background_style_defs(self, arg=None):
- prefix = self.get_css_prefix(arg)
- bg_color = self.style.background_color
- hl_color = self.style.highlight_color
-
- lines = []
-
- if arg and not self.nobackground and bg_color is not None:
- text_style = ''
- if Text in self.ttype2class:
- text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
- lines.insert(
- 0, '%s{ background: %s;%s }' % (
- prefix(''), bg_color, text_style
- )
- )
- if hl_color is not None:
- lines.insert(
- 0, '%s { background-color: %s }' % (prefix('hll'), hl_color)
- )
-
- return lines
-
- def get_linenos_style_defs(self):
- lines = [
- 'pre { %s }' % self._pre_style,
- 'td.linenos .normal { %s }' % self._linenos_style,
- 'span.linenos { %s }' % self._linenos_style,
- 'td.linenos .special { %s }' % self._linenos_special_style,
- 'span.linenos.special { %s }' % self._linenos_special_style,
- ]
-
- return lines
-
- def get_css_prefix(self, arg):
+ style_lines = []
+
+ style_lines.extend(self.get_linenos_style_defs())
+ style_lines.extend(self.get_background_style_defs(arg))
+ style_lines.extend(self.get_token_style_defs(arg))
+
+ return '\n'.join(style_lines)
+
+ def get_token_style_defs(self, arg=None):
+ prefix = self.get_css_prefix(arg)
+
+ styles = [
+ (level, ttype, cls, style)
+ for cls, (style, ttype, level) in self.class2style.items()
+ if cls and style
+ ]
+ styles.sort()
+
+ lines = [
+ '%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
+ for (level, ttype, cls, style) in styles
+ ]
+
+ return lines
+
+ def get_background_style_defs(self, arg=None):
+ prefix = self.get_css_prefix(arg)
+ bg_color = self.style.background_color
+ hl_color = self.style.highlight_color
+
+ lines = []
+
+ if arg and not self.nobackground and bg_color is not None:
+ text_style = ''
+ if Text in self.ttype2class:
+ text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
+ lines.insert(
+ 0, '%s{ background: %s;%s }' % (
+ prefix(''), bg_color, text_style
+ )
+ )
+ if hl_color is not None:
+ lines.insert(
+ 0, '%s { background-color: %s }' % (prefix('hll'), hl_color)
+ )
+
+ return lines
+
+ def get_linenos_style_defs(self):
+ lines = [
+ 'pre { %s }' % self._pre_style,
+ 'td.linenos .normal { %s }' % self._linenos_style,
+ 'span.linenos { %s }' % self._linenos_style,
+ 'td.linenos .special { %s }' % self._linenos_special_style,
+ 'span.linenos.special { %s }' % self._linenos_special_style,
+ ]
+
+ return lines
+
+ def get_css_prefix(self, arg):
if arg is None:
arg = ('cssclass' in self.options and '.'+self.cssclass or '')
- if isinstance(arg, str):
+ if isinstance(arg, str):
args = [arg]
else:
args = list(arg)
@@ -587,26 +587,26 @@ class HtmlFormatter(Formatter):
tmp.append((arg and arg + ' ' or '') + cls)
return ', '.join(tmp)
- return prefix
-
- @property
- def _pre_style(self):
- return 'line-height: 125%;'
-
- @property
- def _linenos_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_color,
- self.style.line_number_background_color
- )
-
- @property
- def _linenos_special_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_special_color,
- self.style.line_number_special_background_color
- )
-
+ return prefix
+
+ @property
+ def _pre_style(self):
+ return 'line-height: 125%;'
+
+ @property
+ def _linenos_style(self):
+ return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
+ self.style.line_number_color,
+ self.style.line_number_background_color
+ )
+
+ @property
+ def _linenos_special_style(self):
+ return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
+ self.style.line_number_special_color,
+ self.style.line_number_special_background_color
+ )
+
def _decodeifneeded(self, value):
if isinstance(value, bytes):
if self.encoding:
@@ -638,7 +638,7 @@ class HtmlFormatter(Formatter):
with open(cssfilename, "w") as cf:
cf.write(CSSFILE_TEMPLATE %
{'styledefs': self.get_style_defs('body')})
- except OSError as err:
+ except OSError as err:
err.strerror = 'Error writing CSS file: ' + err.strerror
raise
@@ -652,7 +652,7 @@ class HtmlFormatter(Formatter):
styledefs=self.get_style_defs('body'),
encoding=self.encoding))
- yield from inner
+ yield from inner
yield 0, DOC_FOOTER
def _wrap_tablelinenos(self, inner):
@@ -671,100 +671,100 @@ class HtmlFormatter(Formatter):
aln = self.anchorlinenos
nocls = self.noclasses
- lines = []
-
- for i in range(fl, fl+lncount):
- print_line = i % st == 0
- special_line = sp and i % sp == 0
-
- if print_line:
- line = '%*d' % (mw, i)
- if aln:
- line = '<a href="#%s-%d">%s</a>' % (la, i, line)
- else:
- line = ' ' * mw
-
- if nocls:
- if special_line:
- style = ' style="%s"' % self._linenos_special_style
+ lines = []
+
+ for i in range(fl, fl+lncount):
+ print_line = i % st == 0
+ special_line = sp and i % sp == 0
+
+ if print_line:
+ line = '%*d' % (mw, i)
+ if aln:
+ line = '<a href="#%s-%d">%s</a>' % (la, i, line)
+ else:
+ line = ' ' * mw
+
+ if nocls:
+ if special_line:
+ style = ' style="%s"' % self._linenos_special_style
else:
- style = ' style="%s"' % self._linenos_style
- else:
- if special_line:
- style = ' class="special"'
+ style = ' style="%s"' % self._linenos_style
+ else:
+ if special_line:
+ style = ' class="special"'
else:
- style = ' class="normal"'
-
- if style:
- line = '<span%s>%s</span>' % (style, line)
-
- lines.append(line)
-
- ls = '\n'.join(lines)
-
- # If a filename was specified, we can't put it into the code table as it
- # would misalign the line numbers. Hence we emit a separate row for it.
- filename_tr = ""
- if self.filename:
- filename_tr = (
- '<tr><th colspan="2" class="filename"><div class="highlight">'
- '<span class="filename">' + self.filename + '</span></div>'
- '</th></tr>')
-
+ style = ' class="normal"'
+
+ if style:
+ line = '<span%s>%s</span>' % (style, line)
+
+ lines.append(line)
+
+ ls = '\n'.join(lines)
+
+ # If a filename was specified, we can't put it into the code table as it
+ # would misalign the line numbers. Hence we emit a separate row for it.
+ filename_tr = ""
+ if self.filename:
+ filename_tr = (
+ '<tr><th colspan="2" class="filename"><div class="highlight">'
+ '<span class="filename">' + self.filename + '</span></div>'
+ '</th></tr>')
+
# in case you wonder about the seemingly redundant <div> here: since the
# content in the other cell also is wrapped in a div, some browsers in
# some configurations seem to mess up the formatting...
- yield 0, (
- '<table class="%stable">' % self.cssclass + filename_tr +
- '<tr><td class="linenos"><div class="linenodiv"><pre>' +
- ls + '</pre></div></td><td class="code">'
- )
+ yield 0, (
+ '<table class="%stable">' % self.cssclass + filename_tr +
+ '<tr><td class="linenos"><div class="linenodiv"><pre>' +
+ ls + '</pre></div></td><td class="code">'
+ )
yield 0, dummyoutfile.getvalue()
yield 0, '</td></tr></table>'
def _wrap_inlinelinenos(self, inner):
# need a list of lines since we need the width of a single number :(
- inner_lines = list(inner)
+ inner_lines = list(inner)
sp = self.linenospecial
st = self.linenostep
num = self.linenostart
- mw = len(str(len(inner_lines) + num - 1))
- la = self.lineanchors
- aln = self.anchorlinenos
- nocls = self.noclasses
-
- for _, inner_line in inner_lines:
- print_line = num % st == 0
- special_line = sp and num % sp == 0
-
- if print_line:
- line = '%*d' % (mw, num)
- else:
- line = ' ' * mw
-
- if nocls:
- if special_line:
- style = ' style="%s"' % self._linenos_special_style
- else:
- style = ' style="%s"' % self._linenos_style
- else:
- if special_line:
- style = ' class="linenos special"'
- else:
- style = ' class="linenos"'
-
- if style:
- linenos = '<span%s>%s</span>' % (style, line)
- else:
- linenos = line
-
- if aln:
- yield 1, ('<a href="#%s-%d">%s</a>' % (la, num, linenos) +
- inner_line)
+ mw = len(str(len(inner_lines) + num - 1))
+ la = self.lineanchors
+ aln = self.anchorlinenos
+ nocls = self.noclasses
+
+ for _, inner_line in inner_lines:
+ print_line = num % st == 0
+ special_line = sp and num % sp == 0
+
+ if print_line:
+ line = '%*d' % (mw, num)
else:
- yield 1, linenos + inner_line
- num += 1
-
+ line = ' ' * mw
+
+ if nocls:
+ if special_line:
+ style = ' style="%s"' % self._linenos_special_style
+ else:
+ style = ' style="%s"' % self._linenos_style
+ else:
+ if special_line:
+ style = ' class="linenos special"'
+ else:
+ style = ' class="linenos"'
+
+ if style:
+ linenos = '<span%s>%s</span>' % (style, line)
+ else:
+ linenos = line
+
+ if aln:
+ yield 1, ('<a href="#%s-%d">%s</a>' % (la, num, linenos) +
+ inner_line)
+ else:
+ yield 1, linenos + inner_line
+ num += 1
+
def _wrap_lineanchors(self, inner):
s = self.lineanchors
# subtract 1 since we have to increment i *before* yielding
@@ -798,7 +798,7 @@ class HtmlFormatter(Formatter):
yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
(style and (' style="%s"' % style)) + '>')
- yield from inner
+ yield from inner
yield 0, '</div>\n'
def _wrap_pre(self, inner):
@@ -806,28 +806,28 @@ class HtmlFormatter(Formatter):
if self.prestyles:
style.append(self.prestyles)
if self.noclasses:
- style.append(self._pre_style)
+ style.append(self._pre_style)
style = '; '.join(style)
- if self.filename and self.linenos != 1:
+ if self.filename and self.linenos != 1:
yield 0, ('<span class="filename">' + self.filename + '</span>')
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
- yield from inner
+ yield from inner
yield 0, '</pre>'
def _wrap_code(self, inner):
yield 0, '<code>'
- yield from inner
+ yield from inner
yield 0, '</code>'
- @functools.lru_cache(maxsize=100)
- def _translate_parts(self, value):
- """HTML-escape a value and split it by newlines."""
- return value.translate(_escape_html_table).split('\n')
-
+ @functools.lru_cache(maxsize=100)
+ def _translate_parts(self, value):
+ """HTML-escape a value and split it by newlines."""
+ return value.translate(_escape_html_table).split('\n')
+
def _format_lines(self, tokensource):
"""
Just format the tokens, without any wrapping tags.
@@ -840,26 +840,26 @@ class HtmlFormatter(Formatter):
lspan = ''
line = []
for ttype, value in tokensource:
- try:
- cspan = self.span_element_openers[ttype]
- except KeyError:
+ try:
+ cspan = self.span_element_openers[ttype]
+ except KeyError:
title = ' title="%s"' % '.'.join(ttype) if self.debug_token_types else ''
- if nocls:
- css_style = self._get_css_inline_styles(ttype)
+ if nocls:
+ css_style = self._get_css_inline_styles(ttype)
if css_style:
css_style = self.class2style[css_style][0]
cspan = '<span style="%s"%s>' % (css_style, title)
else:
cspan = ''
- else:
- css_class = self._get_css_classes(ttype)
+ else:
+ css_class = self._get_css_classes(ttype)
if css_class:
cspan = '<span class="%s"%s>' % (css_class, title)
else:
cspan = ''
- self.span_element_openers[ttype] = cspan
+ self.span_element_openers[ttype] = cspan
- parts = self._translate_parts(value)
+ parts = self._translate_parts(value)
if tagsfile and ttype in Token.Name:
filename, linenumber = self._lookup_ctag(value)
@@ -906,7 +906,7 @@ class HtmlFormatter(Formatter):
def _lookup_ctag(self, token):
entry = ctags.TagEntry()
- if self._ctags.find(entry, token.encode(), 0):
+ if self._ctags.find(entry, token.encode(), 0):
return entry['file'], entry['lineNumber']
else:
return None, None
@@ -959,15 +959,15 @@ class HtmlFormatter(Formatter):
linewise, e.g. line number generators.
"""
source = self._format_lines(tokensource)
-
- # As a special case, we wrap line numbers before line highlighting
- # so the line numbers get wrapped in the highlighting tag.
- if not self.nowrap and self.linenos == 2:
- source = self._wrap_inlinelinenos(source)
-
+
+ # As a special case, we wrap line numbers before line highlighting
+ # so the line numbers get wrapped in the highlighting tag.
+ if not self.nowrap and self.linenos == 2:
+ source = self._wrap_inlinelinenos(source)
+
if self.hl_lines:
source = self._highlight_lines(source)
-
+
if not self.nowrap:
if self.lineanchors:
source = self._wrap_lineanchors(source)
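
For orientation, a minimal usage sketch (not part of the diff) showing how the refactored style-definition helpers and the inline line-number wrapping above surface through the public API; the sample code string and CSS selector are illustrative assumptions.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = "print('hello')\n"  # illustrative input, not from the diff

    # linenos='inline' exercises _wrap_inlinelinenos(); filename= is emitted
    # by the _wrap_pre()/_wrap_tablelinenos() paths shown above.
    formatter = HtmlFormatter(linenos='inline', filename='example.py')
    html = highlight(code, PythonLexer(), formatter)

    # get_style_defs() now concatenates the linenos, background and token
    # style helpers split out above; the selector prefixes every rule.
    css = formatter.get_style_defs('.highlight')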
diff --git a/contrib/python/Pygments/py3/pygments/formatters/img.py b/contrib/python/Pygments/py3/pygments/formatters/img.py
index f481afc4a4..70ba77242a 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/img.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/img.py
@@ -4,7 +4,7 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import sys
from pygments.formatter import Formatter
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- get_choice_opt
+ get_choice_opt
import subprocess
@@ -45,9 +45,9 @@ STYLES = {
}
# A sane default for modern systems
-DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
+DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
DEFAULT_FONT_NAME_WIN = 'Courier New'
-DEFAULT_FONT_NAME_MAC = 'Menlo'
+DEFAULT_FONT_NAME_MAC = 'Menlo'
class PilNotAvailable(ImportError):
@@ -58,7 +58,7 @@ class FontNotFound(Exception):
"""When there are no usable fonts specified"""
-class FontManager:
+class FontManager:
"""
Manages a set of fonts: normal, italic, bold, etc...
"""
@@ -124,9 +124,9 @@ class FontManager:
for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
'/Library/Fonts/', '/System/Library/Fonts/'):
font_map.update(
- (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
- for f in os.listdir(font_dir)
- if f.lower().endswith(('ttf', 'ttc')))
+ (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
+ for f in os.listdir(font_dir)
+ if f.lower().endswith(('ttf', 'ttc')))
for name in STYLES['NORMAL']:
path = self._get_mac_font_path(font_map, self.font_name, name)
@@ -155,7 +155,7 @@ class FontManager:
valname = '%s%s%s' % (basename, style and ' '+style, suffix)
val, _ = _winreg.QueryValueEx(key, valname)
return val
- except OSError:
+ except OSError:
continue
else:
if fail:
@@ -164,43 +164,43 @@ class FontManager:
return None
def _create_win(self):
- lookuperror = None
- keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
- (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'),
- (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
- (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') ]
- for keyname in keynames:
+ lookuperror = None
+ keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
+ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'),
+ (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
+ (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') ]
+ for keyname in keynames:
try:
- key = _winreg.OpenKey(*keyname)
- try:
- path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- path = self._lookup_win(key, self.font_name, STYLES[style])
- if path:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
- return
- except FontNotFound as err:
- lookuperror = err
- finally:
- _winreg.CloseKey(key)
- except OSError:
- pass
- else:
- # If we get here, we checked all registry keys and had no luck
- # We can be in one of two situations now:
- # * All key lookups failed. In this case lookuperror is None and we
- # will raise a generic error
- # * At least one lookup failed with a FontNotFound error. In this
- # case, we will raise that as a more specific error
- if lookuperror:
- raise lookuperror
- raise FontNotFound('Can\'t open Windows font registry key')
+ key = _winreg.OpenKey(*keyname)
+ try:
+ path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
+ self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
+ for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
+ path = self._lookup_win(key, self.font_name, STYLES[style])
+ if path:
+ self.fonts[style] = ImageFont.truetype(path, self.font_size)
+ else:
+ if style == 'BOLDITALIC':
+ self.fonts[style] = self.fonts['BOLD']
+ else:
+ self.fonts[style] = self.fonts['NORMAL']
+ return
+ except FontNotFound as err:
+ lookuperror = err
+ finally:
+ _winreg.CloseKey(key)
+ except OSError:
+ pass
+ else:
+ # If we get here, we checked all registry keys and had no luck
+ # We can be in one of two situations now:
+ # * All key lookups failed. In this case lookuperror is None and we
+ # will raise a generic error
+ # * At least one lookup failed with a FontNotFound error. In this
+ # case, we will raise that as a more specific error
+ if lookuperror:
+ raise lookuperror
+ raise FontNotFound('Can\'t open Windows font registry key')
def get_char_size(self):
"""
@@ -208,12 +208,12 @@ class FontManager:
"""
return self.fonts['NORMAL'].getsize('M')
- def get_text_size(self, text):
- """
-        Get the text size (width, height).
- """
- return self.fonts['NORMAL'].getsize(text)
-
+ def get_text_size(self, text):
+ """
+        Get the text size (width, height).
+ """
+ return self.fonts['NORMAL'].getsize(text)
+
def get_font(self, bold, oblique):
"""
Get the font based on bold and italic flags.
@@ -255,8 +255,8 @@ class ImageFormatter(Formatter):
bold and italic fonts will be generated. This really should be a
monospace font to look sane.
- Default: "Courier New" on Windows, "Menlo" on Mac OS, and
- "DejaVu Sans Mono" on \\*nix
+ Default: "Courier New" on Windows, "Menlo" on Mac OS, and
+ "DejaVu Sans Mono" on \\*nix
`font_size`
The font size in points to be used.
@@ -424,17 +424,17 @@ class ImageFormatter(Formatter):
"""
return self.fontw
- def _get_char_x(self, linelength):
+ def _get_char_x(self, linelength):
"""
Get the X coordinate of a character position.
"""
- return linelength + self.image_pad + self.line_number_width
+ return linelength + self.image_pad + self.line_number_width
- def _get_text_pos(self, linelength, lineno):
+ def _get_text_pos(self, linelength, lineno):
"""
Get the actual position for a character and line position.
"""
- return self._get_char_x(linelength), self._get_line_y(lineno)
+ return self._get_char_x(linelength), self._get_line_y(lineno)
def _get_linenumber_pos(self, lineno):
"""
@@ -452,27 +452,27 @@ class ImageFormatter(Formatter):
fill = '#000'
return fill
- def _get_text_bg_color(self, style):
- """
- Get the correct background color for the token from the style.
- """
- if style['bgcolor'] is not None:
- bg_color = '#' + style['bgcolor']
- else:
- bg_color = None
- return bg_color
-
+ def _get_text_bg_color(self, style):
+ """
+ Get the correct background color for the token from the style.
+ """
+ if style['bgcolor'] is not None:
+ bg_color = '#' + style['bgcolor']
+ else:
+ bg_color = None
+ return bg_color
+
def _get_style_font(self, style):
"""
Get the correct font for the style.
"""
return self.fonts.get_font(style['bold'], style['italic'])
- def _get_image_size(self, maxlinelength, maxlineno):
+ def _get_image_size(self, maxlinelength, maxlineno):
"""
Get the required image size.
"""
- return (self._get_char_x(maxlinelength) + self.image_pad,
+ return (self._get_char_x(maxlinelength) + self.image_pad,
self._get_line_y(maxlineno + 0) + self.image_pad)
def _draw_linenumber(self, posno, lineno):
@@ -484,22 +484,22 @@ class ImageFormatter(Formatter):
str(lineno).rjust(self.line_number_chars),
font=self.fonts.get_font(self.line_number_bold,
self.line_number_italic),
- text_fg=self.line_number_fg,
- text_bg=None,
+ text_fg=self.line_number_fg,
+ text_bg=None,
)
- def _draw_text(self, pos, text, font, text_fg, text_bg):
+ def _draw_text(self, pos, text, font, text_fg, text_bg):
"""
Remember a single drawable tuple to paint later.
"""
- self.drawables.append((pos, text, font, text_fg, text_bg))
+ self.drawables.append((pos, text, font, text_fg, text_bg))
def _create_drawables(self, tokensource):
"""
Create drawables for the token content.
"""
lineno = charno = maxcharno = 0
- maxlinelength = linelength = 0
+ maxlinelength = linelength = 0
for ttype, value in tokensource:
while ttype not in self.styles:
ttype = ttype.parent
@@ -514,23 +514,23 @@ class ImageFormatter(Formatter):
temp = line.rstrip('\n')
if temp:
self._draw_text(
- self._get_text_pos(linelength, lineno),
+ self._get_text_pos(linelength, lineno),
temp,
font = self._get_style_font(style),
- text_fg = self._get_text_color(style),
- text_bg = self._get_text_bg_color(style),
+ text_fg = self._get_text_color(style),
+ text_bg = self._get_text_bg_color(style),
)
- temp_width, temp_hight = self.fonts.get_text_size(temp)
- linelength += temp_width
- maxlinelength = max(maxlinelength, linelength)
+ temp_width, temp_hight = self.fonts.get_text_size(temp)
+ linelength += temp_width
+ maxlinelength = max(maxlinelength, linelength)
charno += len(temp)
maxcharno = max(maxcharno, charno)
if line.endswith('\n'):
# add a line for each extra line in the value
- linelength = 0
+ linelength = 0
charno = 0
lineno += 1
- self.maxlinelength = maxlinelength
+ self.maxlinelength = maxlinelength
self.maxcharno = maxcharno
self.maxlineno = lineno
@@ -540,7 +540,7 @@ class ImageFormatter(Formatter):
"""
if not self.line_numbers:
return
- for p in range(self.maxlineno):
+ for p in range(self.maxlineno):
n = p + self.line_number_start
if (n % self.line_number_step) == 0:
self._draw_linenumber(p, n)
@@ -558,8 +558,8 @@ class ImageFormatter(Formatter):
rectw = self.image_pad + self.line_number_width - self.line_number_pad
draw.rectangle([(0, 0), (rectw, recth)],
fill=self.line_number_bg)
- if self.line_number_separator:
- draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
+ if self.line_number_separator:
+ draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
del draw
def format(self, tokensource, outfile):
@@ -574,7 +574,7 @@ class ImageFormatter(Formatter):
self._draw_line_numbers()
im = Image.new(
'RGB',
- self._get_image_size(self.maxlinelength, self.maxlineno),
+ self._get_image_size(self.maxlinelength, self.maxlineno),
self.background_color
)
self._paint_line_number_bg(im)
@@ -588,11 +588,11 @@ class ImageFormatter(Formatter):
y = self._get_line_y(linenumber - 1)
draw.rectangle([(x, y), (x + rectw, y + recth)],
fill=self.hl_color)
- for pos, value, font, text_fg, text_bg in self.drawables:
- if text_bg:
- text_size = draw.textsize(text=value, font=font)
- draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
- draw.text(pos, value, font=font, fill=text_fg)
+ for pos, value, font, text_fg, text_bg in self.drawables:
+ if text_bg:
+ text_size = draw.textsize(text=value, font=font)
+ draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
+ draw.text(pos, value, font=font, fill=text_fg)
im.save(outfile, self.image_format.upper())
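
A small usage sketch (not part of the diff), assuming Pillow is installed; the input string and output filename are illustrative.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import ImageFormatter  # needs Pillow (PIL)

    code = "print('hello')\n"  # illustrative input
    formatter = ImageFormatter(line_numbers=True, line_number_separator=True)
    with open('out.png', 'wb') as f:            # image output is binary
        highlight(code, PythonLexer(), formatter, outfile=f)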
diff --git a/contrib/python/Pygments/py3/pygments/formatters/irc.py b/contrib/python/Pygments/py3/pygments/formatters/irc.py
index d8da7a39de..c728f6075d 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/irc.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/irc.py
@@ -4,7 +4,7 @@
Formatter for IRC output
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/formatters/latex.py b/contrib/python/Pygments/py3/pygments/formatters/latex.py
index e32fcebc5a..59ce95f36f 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/latex.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/latex.py
@@ -4,16 +4,16 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from io import StringIO
+from io import StringIO
from pygments.formatter import Formatter
-from pygments.lexer import Lexer, do_insertions
+from pygments.lexer import Lexer, do_insertions
from pygments.token import Token, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt
+from pygments.util import get_bool_opt, get_int_opt
__all__ = ['LatexFormatter']
@@ -264,7 +264,7 @@ class LatexFormatter(Formatter):
self.right = self.escapeinside[1]
else:
self.escapeinside = ''
- self.envname = options.get('envname', 'Verbatim')
+ self.envname = options.get('envname', 'Verbatim')
self._create_stylesheet()
@@ -299,13 +299,13 @@ class LatexFormatter(Formatter):
cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
rgbcolor(ndef['color']))
if ndef['border']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{\string -\fboxrule}'
- r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}}' %
+ cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{\string -\fboxrule}'
+ r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}}' %
(rgbcolor(ndef['border']),
rgbcolor(ndef['bgcolor'])))
elif ndef['bgcolor']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{0pt}'
- r'\colorbox[rgb]{%s}{\strut ##1}}}' %
+ cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{0pt}'
+ r'\colorbox[rgb]{%s}{\strut ##1}}}' %
rgbcolor(ndef['bgcolor']))
if cmndef == '':
continue
@@ -320,8 +320,8 @@ class LatexFormatter(Formatter):
"""
cp = self.commandprefix
styles = []
- for name, definition in self.cmd2def.items():
- styles.append(r'\@namedef{%s@tok@%s}{%s}' % (cp, name, definition))
+ for name, definition in self.cmd2def.items():
+ styles.append(r'\@namedef{%s@tok@%s}{%s}' % (cp, name, definition))
return STYLE_TEMPLATE % {'cp': self.commandprefix,
'styles': '\n'.join(styles)}
@@ -334,25 +334,25 @@ class LatexFormatter(Formatter):
realoutfile = outfile
outfile = StringIO()
- outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}')
+ outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}')
if self.linenos:
start, step = self.linenostart, self.linenostep
- outfile.write(',numbers=left' +
- (start and ',firstnumber=%d' % start or '') +
- (step and ',stepnumber=%d' % step or ''))
+ outfile.write(',numbers=left' +
+ (start and ',firstnumber=%d' % start or '') +
+ (step and ',stepnumber=%d' % step or ''))
if self.mathescape or self.texcomments or self.escapeinside:
- outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7'
- '\\catcode`\\_=8\\relax}')
+ outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7'
+ '\\catcode`\\_=8\\relax}')
if self.verboptions:
- outfile.write(',' + self.verboptions)
- outfile.write(']\n')
+ outfile.write(',' + self.verboptions)
+ outfile.write(']\n')
for ttype, value in tokensource:
if ttype in Token.Comment:
if self.texcomments:
# Try to guess comment starting lexeme and escape it ...
start = value[0:1]
- for i in range(1, len(value)):
+ for i in range(1, len(value)):
if start[0] != value[i]:
break
start += value[i]
@@ -408,7 +408,7 @@ class LatexFormatter(Formatter):
else:
outfile.write(value)
- outfile.write('\\end{' + self.envname + '}\n')
+ outfile.write('\\end{' + self.envname + '}\n')
if self.full:
encoding = self.encoding or 'utf8'
@@ -445,44 +445,44 @@ class LatexEmbeddedLexer(Lexer):
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
- # find and remove all the escape tokens (replace with an empty string)
- # this is very similar to DelegatingLexer.get_tokens_unprocessed.
- buffered = ''
- insertions = []
- insertion_buf = []
- for i, t, v in self._find_safe_escape_tokens(text):
- if t is None:
- if insertion_buf:
- insertions.append((len(buffered), insertion_buf))
- insertion_buf = []
- buffered += v
- else:
- insertion_buf.append((i, t, v))
- if insertion_buf:
- insertions.append((len(buffered), insertion_buf))
- return do_insertions(insertions,
- self.lang.get_tokens_unprocessed(buffered))
-
- def _find_safe_escape_tokens(self, text):
- """ find escape tokens that are not in strings or comments """
- for i, t, v in self._filter_to(
- self.lang.get_tokens_unprocessed(text),
- lambda t: t in Token.Comment or t in Token.String
- ):
- if t is None:
- for i2, t2, v2 in self._find_escape_tokens(v):
- yield i + i2, t2, v2
- else:
- yield i, None, v
-
- def _filter_to(self, it, pred):
- """ Keep only the tokens that match `pred`, merge the others together """
+ # find and remove all the escape tokens (replace with an empty string)
+ # this is very similar to DelegatingLexer.get_tokens_unprocessed.
+ buffered = ''
+ insertions = []
+ insertion_buf = []
+ for i, t, v in self._find_safe_escape_tokens(text):
+ if t is None:
+ if insertion_buf:
+ insertions.append((len(buffered), insertion_buf))
+ insertion_buf = []
+ buffered += v
+ else:
+ insertion_buf.append((i, t, v))
+ if insertion_buf:
+ insertions.append((len(buffered), insertion_buf))
+ return do_insertions(insertions,
+ self.lang.get_tokens_unprocessed(buffered))
+
+ def _find_safe_escape_tokens(self, text):
+ """ find escape tokens that are not in strings or comments """
+ for i, t, v in self._filter_to(
+ self.lang.get_tokens_unprocessed(text),
+ lambda t: t in Token.Comment or t in Token.String
+ ):
+ if t is None:
+ for i2, t2, v2 in self._find_escape_tokens(v):
+ yield i + i2, t2, v2
+ else:
+ yield i, None, v
+
+ def _filter_to(self, it, pred):
+ """ Keep only the tokens that match `pred`, merge the others together """
buf = ''
idx = 0
- for i, t, v in it:
- if pred(t):
+ for i, t, v in it:
+ if pred(t):
if buf:
- yield idx, None, buf
+ yield idx, None, buf
buf = ''
yield i, t, v
else:
@@ -490,16 +490,16 @@ class LatexEmbeddedLexer(Lexer):
idx = i
buf += v
if buf:
- yield idx, None, buf
+ yield idx, None, buf
- def _find_escape_tokens(self, text):
- """ Find escape tokens within text, give token=None otherwise """
- index = 0
+ def _find_escape_tokens(self, text):
+ """ Find escape tokens within text, give token=None otherwise """
+ index = 0
while text:
a, sep1, text = text.partition(self.left)
if a:
- yield index, None, a
- index += len(a)
+ yield index, None, a
+ index += len(a)
if sep1:
b, sep2, text = text.partition(self.right)
if sep2:
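
A usage sketch (not part of the diff) for the options touched above, notably the new envname default; the code string is an illustrative assumption.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import LatexFormatter

    code = "print('hello')\n"  # illustrative input
    # envname defaults to 'Verbatim' as set in __init__ above; linenos maps to
    # fancyvrb's numbers/firstnumber/stepnumber options.
    formatter = LatexFormatter(linenos=True, envname='Verbatim')
    tex = highlight(code, PythonLexer(), formatter)
    preamble = formatter.get_style_defs()  # the \@namedef'd style commands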
diff --git a/contrib/python/Pygments/py3/pygments/formatters/other.py b/contrib/python/Pygments/py3/pygments/formatters/other.py
index 1a12c42b96..4c79ddfc2c 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/other.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/other.py
@@ -4,12 +4,12 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
-from pygments.util import get_choice_opt
+from pygments.util import get_choice_opt
from pygments.token import Token
from pygments.console import colorize
@@ -86,44 +86,44 @@ class RawTokenFormatter(Formatter):
if self.compress == 'gz':
import gzip
outfile = gzip.GzipFile('', 'wb', 9, outfile)
-
- write = outfile.write
- flush = outfile.close
+
+ write = outfile.write
+ flush = outfile.close
elif self.compress == 'bz2':
import bz2
compressor = bz2.BZ2Compressor(9)
-
+
def write(text):
- outfile.write(compressor.compress(text))
-
+ outfile.write(compressor.compress(text))
+
def flush():
outfile.write(compressor.flush())
outfile.flush()
else:
- write = outfile.write
+ write = outfile.write
flush = outfile.flush
if self.error_color:
for ttype, value in tokensource:
- line = b"%r\t%r\n" % (ttype, value)
+ line = b"%r\t%r\n" % (ttype, value)
if ttype is Token.Error:
write(colorize(self.error_color, line))
else:
write(line)
else:
for ttype, value in tokensource:
- write(b"%r\t%r\n" % (ttype, value))
+ write(b"%r\t%r\n" % (ttype, value))
flush()
-
-TESTCASE_BEFORE = '''\
- def testNeedsName(lexer):
+
+TESTCASE_BEFORE = '''\
+ def testNeedsName(lexer):
fragment = %r
tokens = [
'''
-TESTCASE_AFTER = '''\
+TESTCASE_AFTER = '''\
]
- assert list(lexer.get_tokens(fragment)) == tokens
+ assert list(lexer.get_tokens(fragment)) == tokens
'''
@@ -149,8 +149,8 @@ class TestcaseFormatter(Formatter):
rawbuf.append(value)
outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
- before = TESTCASE_BEFORE % (''.join(rawbuf),)
- during = ''.join(outbuf)
+ before = TESTCASE_BEFORE % (''.join(rawbuf),)
+ during = ''.join(outbuf)
after = TESTCASE_AFTER
if self.encoding is None:
outfile.write(before + during + after)
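
A usage sketch (not part of the diff); the input string is illustrative. RawTokenFormatter emits bytes, and the compress option shown above wraps the output in gzip or bz2.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import RawTokenFormatter

    code = "print('hello')\n"  # illustrative input
    raw = highlight(code, PythonLexer(), RawTokenFormatter())          # b"Token...\t'...'\n" lines
    gz = highlight(code, PythonLexer(), RawTokenFormatter(compress='gz'))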
diff --git a/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py b/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py
index 926ccc6d3b..222cf775ba 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py
@@ -1,83 +1,83 @@
-"""
- pygments.formatters.pangomarkup
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pango markup output.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-
-
-__all__ = ['PangoMarkupFormatter']
-
-
-_escape_table = {
- ord('&'): '&amp;',
- ord('<'): '&lt;',
-}
-
-
-def escape_special_chars(text, table=_escape_table):
- """Escape & and < for Pango Markup."""
- return text.translate(table)
-
-
-class PangoMarkupFormatter(Formatter):
- """
- Format tokens as Pango Markup code. It can then be rendered to an SVG.
-
- .. versionadded:: 2.9
- """
-
- name = 'Pango Markup'
- aliases = ['pango', 'pangomarkup']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.styles = {}
-
- for token, style in self.style:
- start = ''
- end = ''
- if style['color']:
- start += '<span fgcolor="#%s">' % style['color']
- end = '</span>' + end
- if style['bold']:
- start += '<b>'
- end = '</b>' + end
- if style['italic']:
- start += '<i>'
- end = '</i>' + end
- if style['underline']:
- start += '<u>'
- end = '</u>' + end
- self.styles[token] = (start, end)
-
- def format_unencoded(self, tokensource, outfile):
- lastval = ''
- lasttype = None
-
- outfile.write('<tt>')
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += escape_special_chars(value)
- else:
- if lastval:
- stylebegin, styleend = self.styles[lasttype]
- outfile.write(stylebegin + lastval + styleend)
- lastval = escape_special_chars(value)
- lasttype = ttype
-
- if lastval:
- stylebegin, styleend = self.styles[lasttype]
- outfile.write(stylebegin + lastval + styleend)
-
- outfile.write('</tt>')
+"""
+ pygments.formatters.pangomarkup
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for Pango markup output.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
+
+
+__all__ = ['PangoMarkupFormatter']
+
+
+_escape_table = {
+ ord('&'): '&amp;',
+ ord('<'): '&lt;',
+}
+
+
+def escape_special_chars(text, table=_escape_table):
+ """Escape & and < for Pango Markup."""
+ return text.translate(table)
+
+
+class PangoMarkupFormatter(Formatter):
+ """
+ Format tokens as Pango Markup code. It can then be rendered to an SVG.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'Pango Markup'
+ aliases = ['pango', 'pangomarkup']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+
+ self.styles = {}
+
+ for token, style in self.style:
+ start = ''
+ end = ''
+ if style['color']:
+ start += '<span fgcolor="#%s">' % style['color']
+ end = '</span>' + end
+ if style['bold']:
+ start += '<b>'
+ end = '</b>' + end
+ if style['italic']:
+ start += '<i>'
+ end = '</i>' + end
+ if style['underline']:
+ start += '<u>'
+ end = '</u>' + end
+ self.styles[token] = (start, end)
+
+ def format_unencoded(self, tokensource, outfile):
+ lastval = ''
+ lasttype = None
+
+ outfile.write('<tt>')
+
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ if ttype == lasttype:
+ lastval += escape_special_chars(value)
+ else:
+ if lastval:
+ stylebegin, styleend = self.styles[lasttype]
+ outfile.write(stylebegin + lastval + styleend)
+ lastval = escape_special_chars(value)
+ lasttype = ttype
+
+ if lastval:
+ stylebegin, styleend = self.styles[lasttype]
+ outfile.write(stylebegin + lastval + styleend)
+
+ outfile.write('</tt>')
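
A usage sketch (not part of the diff) for the newly vendored Pango Markup formatter; the input string is illustrative.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import PangoMarkupFormatter  # added in Pygments 2.9

    markup = highlight("print('hello')\n", PythonLexer(), PangoMarkupFormatter())
    # 'markup' is wrapped in <tt>...</tt> and can be handed to any Pango
    # consumer, e.g. Gtk.Label.set_markup(markup).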
diff --git a/contrib/python/Pygments/py3/pygments/formatters/rtf.py b/contrib/python/Pygments/py3/pygments/formatters/rtf.py
index ba071c78f4..c68f108961 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/rtf.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/rtf.py
@@ -4,12 +4,12 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
-from pygments.util import get_int_opt, surrogatepair
+from pygments.util import get_int_opt, surrogatepair
__all__ = ['RtfFormatter']
@@ -34,7 +34,7 @@ class RtfFormatter(Formatter):
``'default'``).
`fontface`
- The used font family, for example ``Bitstream Vera Sans``. Defaults to
+ The used font family, for example ``Bitstream Vera Sans``. Defaults to
some generic font which is supposed to have fixed width.
`fontsize`
@@ -64,14 +64,14 @@ class RtfFormatter(Formatter):
self.fontsize = get_int_opt(options, 'fontsize', 0)
def _escape(self, text):
- return text.replace('\\', '\\\\') \
- .replace('{', '\\{') \
- .replace('}', '\\}')
+ return text.replace('\\', '\\\\') \
+ .replace('{', '\\{') \
+ .replace('}', '\\}')
def _escape_text(self, text):
- # empty strings, should give a small performance improvement
+ # empty strings, should give a small performance improvement
if not text:
- return ''
+ return ''
# escape text
text = self._escape(text)
@@ -84,21 +84,21 @@ class RtfFormatter(Formatter):
buf.append(str(c))
elif (2**7) <= cn < (2**16):
# single unicode escape sequence
- buf.append('{\\u%d}' % cn)
+ buf.append('{\\u%d}' % cn)
elif (2**16) <= cn:
# RTF limits unicode to 16 bits.
# Force surrogate pairs
- buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
+ buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
- return ''.join(buf).replace('\n', '\\par\n')
+ return ''.join(buf).replace('\n', '\\par\n')
def format_unencoded(self, tokensource, outfile):
# rtf 1.8 header
- outfile.write('{\\rtf1\\ansi\\uc0\\deff0'
- '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
- '{\\colortbl;' % (self.fontface and
- ' ' + self._escape(self.fontface) or
- ''))
+ outfile.write('{\\rtf1\\ansi\\uc0\\deff0'
+ '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
+ '{\\colortbl;' % (self.fontface and
+ ' ' + self._escape(self.fontface) or
+ ''))
# convert colors and save them in a mapping to access them later.
color_mapping = {}
@@ -107,15 +107,15 @@ class RtfFormatter(Formatter):
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
- outfile.write('\\red%d\\green%d\\blue%d;' % (
+ outfile.write('\\red%d\\green%d\\blue%d;' % (
int(color[0:2], 16),
int(color[2:4], 16),
int(color[4:6], 16)
))
offset += 1
- outfile.write('}\\f0 ')
+ outfile.write('}\\f0 ')
if self.fontsize:
- outfile.write('\\fs%d' % self.fontsize)
+ outfile.write('\\fs%d' % self.fontsize)
# highlight stream
for ttype, value in tokensource:
@@ -124,23 +124,23 @@ class RtfFormatter(Formatter):
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
- buf.append('\\cb%d' % color_mapping[style['bgcolor']])
+ buf.append('\\cb%d' % color_mapping[style['bgcolor']])
if style['color']:
- buf.append('\\cf%d' % color_mapping[style['color']])
+ buf.append('\\cf%d' % color_mapping[style['color']])
if style['bold']:
- buf.append('\\b')
+ buf.append('\\b')
if style['italic']:
- buf.append('\\i')
+ buf.append('\\i')
if style['underline']:
- buf.append('\\ul')
+ buf.append('\\ul')
if style['border']:
- buf.append('\\chbrdr\\chcfpat%d' %
+ buf.append('\\chbrdr\\chcfpat%d' %
color_mapping[style['border']])
- start = ''.join(buf)
+ start = ''.join(buf)
if start:
- outfile.write('{%s ' % start)
+ outfile.write('{%s ' % start)
outfile.write(self._escape_text(value))
if start:
- outfile.write('}')
+ outfile.write('}')
- outfile.write('}')
+ outfile.write('}')
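
A usage sketch (not part of the diff); the input string and output path are illustrative.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import RtfFormatter

    code = "print('hello')\n"  # illustrative input
    rtf = highlight(code, PythonLexer(),
                    RtfFormatter(fontface='Courier New', fontsize=24))  # \fsN is in half-points
    with open('out.rtf', 'w') as f:   # RTF is plain text
        f.write(rtf)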
diff --git a/contrib/python/Pygments/py3/pygments/formatters/svg.py b/contrib/python/Pygments/py3/pygments/formatters/svg.py
index 547a7bbcdd..09043d362b 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/svg.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/svg.py
@@ -4,12 +4,12 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
-from pygments.token import Comment
+from pygments.token import Comment
from pygments.util import get_bool_opt, get_int_opt
__all__ = ['SvgFormatter']
@@ -52,19 +52,19 @@ class SvgFormatter(Formatter):
The value to give the wrapping ``<g>`` element's ``font-size``
attribute, defaults to ``"14px"``.
- `linenos`
- If ``True``, add line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenowidth`
- Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
- for up to 4-digit line numbers. Increase width for longer code blocks).
-
+ `linenos`
+ If ``True``, add line numbers (default: ``False``).
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ `linenowidth`
+ Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
+ for up to 4-digit line numbers. Increase width for longer code blocks).
+
`xoffset`
Starting offset in X direction, defaults to ``0``.
@@ -105,10 +105,10 @@ class SvgFormatter(Formatter):
self.yoffset = get_int_opt(options, 'yoffset', int_fs)
self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
self.spacehack = get_bool_opt(options, 'spacehack', True)
- self.linenos = get_bool_opt(options,'linenos',False)
- self.linenostart = get_int_opt(options,'linenostart',1)
- self.linenostep = get_int_opt(options,'linenostep',1)
- self.linenowidth = get_int_opt(options,'linenowidth', 3*self.ystep)
+ self.linenos = get_bool_opt(options,'linenos',False)
+ self.linenostart = get_int_opt(options,'linenostart',1)
+ self.linenostep = get_int_opt(options,'linenostep',1)
+ self.linenowidth = get_int_opt(options,'linenowidth', 3*self.ystep)
self._stylecache = {}
def format_unencoded(self, tokensource, outfile):
@@ -132,20 +132,20 @@ class SvgFormatter(Formatter):
outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
outfile.write('<g font-family="%s" font-size="%s">\n' %
(self.fontfamily, self.fontsize))
-
- counter = self.linenostart
- counter_step = self.linenostep
- counter_style = self._get_style(Comment)
- line_x = x
-
- if self.linenos:
- if counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" %s text-anchor="end">%s</text>' %
- (x+self.linenowidth,y,counter_style,counter))
- line_x += self.linenowidth + self.ystep
- counter += 1
-
- outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
+
+ counter = self.linenostart
+ counter_step = self.linenostep
+ counter_style = self._get_style(Comment)
+ line_x = x
+
+ if self.linenos:
+ if counter % counter_step == 0:
+ outfile.write('<text x="%s" y="%s" %s text-anchor="end">%s</text>' %
+ (x+self.linenowidth,y,counter_style,counter))
+ line_x += self.linenowidth + self.ystep
+ counter += 1
+
+ outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
for ttype, value in tokensource:
style = self._get_style(ttype)
tspan = style and '<tspan' + style + '>' or ''
@@ -157,13 +157,13 @@ class SvgFormatter(Formatter):
for part in parts[:-1]:
outfile.write(tspan + part + tspanend)
y += self.ystep
- outfile.write('</text>\n')
- if self.linenos and counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
- (x+self.linenowidth,y,counter_style,counter))
-
- counter += 1
- outfile.write('<text x="%s" y="%s" ' 'xml:space="preserve">' % (line_x,y))
+ outfile.write('</text>\n')
+ if self.linenos and counter % counter_step == 0:
+ outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
+ (x+self.linenowidth,y,counter_style,counter))
+
+ counter += 1
+ outfile.write('<text x="%s" y="%s" ' 'xml:space="preserve">' % (line_x,y))
outfile.write(tspan + parts[-1] + tspanend)
outfile.write('</text>')
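
A usage sketch (not part of the diff) exercising the line-number options introduced above; the input string and filename are illustrative.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import SvgFormatter

    svg = highlight("print('hello')\nprint('world')\n", PythonLexer(),
                    SvgFormatter(linenos=True, linenostart=10, linenostep=1))
    with open('out.svg', 'w') as f:
        f.write(svg)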
diff --git a/contrib/python/Pygments/py3/pygments/formatters/terminal.py b/contrib/python/Pygments/py3/pygments/formatters/terminal.py
index a27594d0df..aced0ded52 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/terminal.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/terminal.py
@@ -4,7 +4,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/formatters/terminal256.py b/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
index be6f890f44..f2207fc65a 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
@@ -10,7 +10,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -32,12 +32,12 @@ __all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
class EscapeSequence:
- def __init__(self, fg=None, bg=None, bold=False, underline=False, italic=False):
+ def __init__(self, fg=None, bg=None, bold=False, underline=False, italic=False):
self.fg = fg
self.bg = bg
self.bold = bold
self.underline = underline
- self.italic = italic
+ self.italic = italic
def escape(self, attrs):
if len(attrs):
@@ -66,8 +66,8 @@ class EscapeSequence:
attrs.append("01")
if self.underline:
attrs.append("04")
- if self.italic:
- attrs.append("03")
+ if self.italic:
+ attrs.append("03")
return self.escape(attrs)
def true_color_string(self):
@@ -80,8 +80,8 @@ class EscapeSequence:
attrs.append("01")
if self.underline:
attrs.append("04")
- if self.italic:
- attrs.append("03")
+ if self.italic:
+ attrs.append("03")
return self.escape(attrs)
def reset_string(self):
@@ -90,7 +90,7 @@ class EscapeSequence:
attrs.append("39")
if self.bg is not None:
attrs.append("49")
- if self.bold or self.underline or self.italic:
+ if self.bold or self.underline or self.italic:
attrs.append("00")
return self.escape(attrs)
@@ -123,10 +123,10 @@ class Terminal256Formatter(Formatter):
`style`
The style to use, can be a string or a Style subclass (default:
``'default'``).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
+
+ `linenos`
+ Set to ``True`` to have line numbers on the terminal output as well
+ (default: ``False`` = no line numbers).
"""
name = 'Terminal256'
aliases = ['terminal256', 'console256', '256']
@@ -141,14 +141,14 @@ class Terminal256Formatter(Formatter):
self.usebold = 'nobold' not in options
self.useunderline = 'nounderline' not in options
- self.useitalic = 'noitalic' not in options
+ self.useitalic = 'noitalic' not in options
self._build_color_table() # build an RGB-to-256 color conversion table
self._setup_styles() # convert selected style's colors to term. colors
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
+ self.linenos = options.get('linenos', False)
+ self._lineno = 0
+
def _build_color_table(self):
# colors 0..15: 16 basic colors
@@ -237,22 +237,22 @@ class Terminal256Formatter(Formatter):
escape.bold = True
if self.useunderline and ndef['underline']:
escape.underline = True
- if self.useitalic and ndef['italic']:
- escape.italic = True
+ if self.useitalic and ndef['italic']:
+ escape.italic = True
self.style_string[str(ttype)] = (escape.color_string(),
escape.reset_string())
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
+ def _write_lineno(self, outfile):
+ self._lineno += 1
+ outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
+
def format(self, tokensource, outfile):
return Formatter.format(self, tokensource, outfile)
def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
+ if self.linenos:
+ self._write_lineno(outfile)
+
for ttype, value in tokensource:
not_found = True
while ttype and not_found:
@@ -266,11 +266,11 @@ class Terminal256Formatter(Formatter):
for line in spl[:-1]:
if line:
outfile.write(on + line + off)
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
+ if self.linenos:
+ self._write_lineno(outfile)
+ else:
+ outfile.write('\n')
+
if spl[-1]:
outfile.write(on + spl[-1] + off)
@@ -285,11 +285,11 @@ class Terminal256Formatter(Formatter):
if not_found:
outfile.write(value)
- if self.linenos:
- outfile.write("\n")
-
-
+ if self.linenos:
+ outfile.write("\n")
+
+
class TerminalTrueColorFormatter(Terminal256Formatter):
r"""
Format tokens with ANSI color sequences, for output in a true-color
@@ -332,7 +332,7 @@ class TerminalTrueColorFormatter(Terminal256Formatter):
escape.bold = True
if self.useunderline and ndef['underline']:
escape.underline = True
- if self.useitalic and ndef['italic']:
- escape.italic = True
+ if self.useitalic and ndef['italic']:
+ escape.italic = True
self.style_string[str(ttype)] = (escape.true_color_string(),
escape.reset_string())
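
A usage sketch (not part of the diff) for the linenos and italic handling added above; the style name and input string are illustrative.

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import Terminal256Formatter, TerminalTrueColorFormatter

    code = "print('hello')\n"  # illustrative input
    print(highlight(code, PythonLexer(),
                    Terminal256Formatter(style='monokai', linenos=True)), end='')
    print(highlight(code, PythonLexer(),
                    TerminalTrueColorFormatter(style='monokai')), end='')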
diff --git a/contrib/python/Pygments/py3/pygments/lexer.py b/contrib/python/Pygments/py3/pygments/lexer.py
index 33d738a8d6..4aa3105f39 100644
--- a/contrib/python/Pygments/py3/pygments/lexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexer.py
@@ -4,7 +4,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,7 +16,7 @@ from pygments.filter import apply_filters, Filter
from pygments.filters import get_filter_by_name
from pygments.token import Error, Text, Other, _TokenType
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- make_analysator, Future, guess_decode
+ make_analysator, Future, guess_decode
from pygments.regexopt import regex_opt
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
@@ -45,7 +45,7 @@ class LexerMeta(type):
return type.__new__(mcs, name, bases, d)
-class Lexer(metaclass=LexerMeta):
+class Lexer(metaclass=LexerMeta):
"""
Lexer for a specific language.
@@ -141,16 +141,16 @@ class Lexer(metaclass=LexerMeta):
Also preprocess the text, i.e. expand tabs and strip it if
wanted and applies registered filters.
"""
- if not isinstance(text, str):
+ if not isinstance(text, str):
if self.encoding == 'guess':
text, _ = guess_decode(text)
elif self.encoding == 'chardet':
try:
import chardet
- except ImportError as e:
+ except ImportError as e:
raise ImportError('To enable chardet encoding guessing, '
'please install the chardet library '
- 'from http://chardet.feedparser.org/') from e
+ 'from http://chardet.feedparser.org/') from e
# check for BOM first
decoded = None
for bom, encoding in _encoding_map:
@@ -165,11 +165,11 @@ class Lexer(metaclass=LexerMeta):
text = decoded
else:
text = text.decode(self.encoding)
- if text.startswith('\ufeff'):
- text = text[len('\ufeff'):]
+ if text.startswith('\ufeff'):
+ text = text[len('\ufeff'):]
else:
- if text.startswith('\ufeff'):
- text = text[len('\ufeff'):]
+ if text.startswith('\ufeff'):
+ text = text[len('\ufeff'):]
# text now *is* a unicode string
text = text.replace('\r\n', '\n')
@@ -248,7 +248,7 @@ class include(str): # pylint: disable=invalid-name
pass
-class _inherit:
+class _inherit:
"""
    Indicates that a state should inherit from its superclass.
"""
@@ -271,7 +271,7 @@ class combined(tuple): # pylint: disable=invalid-name
pass
-class _PseudoMatch:
+class _PseudoMatch:
"""
A pseudo match object constructed from a string.
"""
@@ -324,12 +324,12 @@ def bygroups(*args):
return callback
-class _This:
+class _This:
"""
Special singleton used for indicating the caller class.
Used by ``using``.
"""
-
+
this = _This()
@@ -466,7 +466,7 @@ class RegexLexerMeta(LexerMeta):
def _process_state(cls, unprocessed, processed, state):
"""Preprocess a single state definition."""
- assert type(state) is str, "wrong state name %r" % state
+ assert type(state) is str, "wrong state name %r" % state
assert state[0] != '#', "invalid state name %r" % state
if state in processed:
return processed[state]
@@ -495,7 +495,7 @@ class RegexLexerMeta(LexerMeta):
rex = cls._process_regex(tdef[0], rflags, state)
except Exception as err:
raise ValueError("uncompilable regex %r in state %r of %r: %s" %
- (tdef[0], state, cls, err)) from err
+ (tdef[0], state, cls, err)) from err
token = cls._process_token(tdef[1])
@@ -533,7 +533,7 @@ class RegexLexerMeta(LexerMeta):
for c in cls.__mro__:
toks = c.__dict__.get('tokens', {})
- for state, items in toks.items():
+ for state, items in toks.items():
curitems = tokens.get(state)
if curitems is None:
# N.b. because this is assigned by reference, sufficiently
@@ -579,7 +579,7 @@ class RegexLexerMeta(LexerMeta):
return type.__call__(cls, *args, **kwds)
-class RegexLexer(Lexer, metaclass=RegexLexerMeta):
+class RegexLexer(Lexer, metaclass=RegexLexerMeta):
"""
Base for simple stateful regular expression-based lexers.
Simplifies the lexing process so that you need only
@@ -632,7 +632,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
if type(action) is _TokenType:
yield pos, action, m.group()
else:
- yield from action(self, m)
+ yield from action(self, m)
pos = m.end()
if new_state is not None:
# state transition
@@ -667,7 +667,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
# at EOL, reset state to "root"
statestack = ['root']
statetokens = tokendefs['root']
- yield pos, Text, '\n'
+ yield pos, Text, '\n'
pos += 1
continue
yield pos, Error, text[pos]
@@ -676,7 +676,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
break
-class LexerContext:
+class LexerContext:
"""
A helper object that holds lexer position data.
"""
@@ -719,7 +719,7 @@ class ExtendedRegexLexer(RegexLexer):
yield ctx.pos, action, m.group()
ctx.pos = m.end()
else:
- yield from action(self, m, ctx)
+ yield from action(self, m, ctx)
if not new_state:
# altered the state stack?
statetokens = tokendefs[ctx.stack[-1]]
@@ -755,7 +755,7 @@ class ExtendedRegexLexer(RegexLexer):
# at EOL, reset state to "root"
ctx.stack = ['root']
statetokens = tokendefs['root']
- yield ctx.pos, Text, '\n'
+ yield ctx.pos, Text, '\n'
ctx.pos += 1
continue
yield ctx.pos, Error, text[ctx.pos]
@@ -783,7 +783,7 @@ def do_insertions(insertions, tokens):
index, itokens = next(insertions)
except StopIteration:
# no insertions
- yield from tokens
+ yield from tokens
return
realpos = None
@@ -798,9 +798,9 @@ def do_insertions(insertions, tokens):
oldi = 0
while insleft and i + len(v) >= index:
tmpval = v[oldi:index - i]
- if tmpval:
- yield realpos, t, tmpval
- realpos += len(tmpval)
+ if tmpval:
+ yield realpos, t, tmpval
+ realpos += len(tmpval)
for it_index, it_token, it_value in itokens:
yield realpos, it_token, it_value
realpos += len(it_value)
@@ -810,9 +810,9 @@ def do_insertions(insertions, tokens):
except StopIteration:
insleft = False
break # not strictly necessary
- if oldi < len(v):
- yield realpos, t, v[oldi:]
- realpos += len(v) - oldi
+ if oldi < len(v):
+ yield realpos, t, v[oldi:]
+ realpos += len(v) - oldi
# leftover tokens
while insleft:
@@ -850,7 +850,7 @@ class ProfilingRegexLexerMeta(RegexLexerMeta):
return match_func
-class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
+class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
"""Drop-in replacement for RegexLexer that does profiling of its regexes."""
_prof_data = []
@@ -859,7 +859,7 @@ class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
def get_tokens_unprocessed(self, text, stack=('root',)):
# this needs to be a stack, since using(this) will produce nested calls
self.__class__._prof_data.append({})
- yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
+ yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
rawdata = self.__class__._prof_data.pop()
data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
n, 1000 * t, 1000 * t / n)
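
For context, a minimal RegexLexer sketch (not part of the diff) of the kind these base classes support; the toy grammar is an illustrative assumption.

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Name, Operator, Number, Whitespace

    class AssignLexer(RegexLexer):
        """Toy lexer for lines like "x = 42" (illustrative only)."""
        name = 'Assign'
        aliases = ['assign']
        tokens = {
            'root': [
                (r'(\w+)(\s*=\s*)(\d+)', bygroups(Name.Variable, Operator, Number)),
                (r'\s+', Whitespace),
            ],
        }

    for ttype, value in AssignLexer().get_tokens("x = 42"):
        print(ttype, repr(value))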
diff --git a/contrib/python/Pygments/py3/pygments/lexers/__init__.py b/contrib/python/Pygments/py3/pygments/lexers/__init__.py
index 9b89b6da3f..2cc29a69c2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/__init__.py
@@ -4,7 +4,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,15 +17,15 @@ from os.path import basename
from pygments.lexers._mapping import LEXERS
from pygments.modeline import get_filetype_from_buffer
from pygments.plugin import find_plugin_lexers
-from pygments.util import ClassNotFound, guess_decode
+from pygments.util import ClassNotFound, guess_decode
-COMPAT = {
- 'Python3Lexer': 'PythonLexer',
- 'Python3TracebackLexer': 'PythonTracebackLexer',
-}
+COMPAT = {
+ 'Python3Lexer': 'PythonLexer',
+ 'Python3TracebackLexer': 'PythonTracebackLexer',
+}
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
- 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
+ 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
_lexer_cache = {}
_pattern_cache = {}
@@ -51,7 +51,7 @@ def get_all_lexers():
"""Return a generator of tuples in the form ``(name, aliases,
    filenames, mimetypes)`` of all known lexers.
"""
- for item in LEXERS.values():
+ for item in LEXERS.values():
yield item[1:]
for lexer in find_plugin_lexers():
yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
@@ -65,7 +65,7 @@ def find_lexer_class(name):
if name in _lexer_cache:
return _lexer_cache[name]
# lookup builtin lexers
- for module_name, lname, aliases, _, _ in LEXERS.values():
+ for module_name, lname, aliases, _, _ in LEXERS.values():
if name == lname:
_load_lexers(module_name)
return _lexer_cache[name]
@@ -85,7 +85,7 @@ def find_lexer_class_by_name(_alias):
if not _alias:
raise ClassNotFound('no lexer for alias %r found' % _alias)
# lookup builtin lexers
- for module_name, name, aliases, _, _ in LEXERS.values():
+ for module_name, name, aliases, _, _ in LEXERS.values():
if _alias.lower() in aliases:
if name not in _lexer_cache:
_load_lexers(module_name)
@@ -106,7 +106,7 @@ def get_lexer_by_name(_alias, **options):
raise ClassNotFound('no lexer for alias %r found' % _alias)
# lookup builtin lexers
- for module_name, name, aliases, _, _ in LEXERS.values():
+ for module_name, name, aliases, _, _ in LEXERS.values():
if _alias.lower() in aliases:
if name not in _lexer_cache:
_load_lexers(module_name)
@@ -145,9 +145,9 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options):
lexer_class = custom_namespace[lexername]
# And finally instantiate it with the options
return lexer_class(**options)
- except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
- except ClassNotFound:
+ except OSError as err:
+ raise ClassNotFound('cannot read %s: %s' % (filename, err))
+ except ClassNotFound:
raise
except Exception as err:
raise ClassNotFound('error when loading custom lexer: %s' % err)
@@ -163,7 +163,7 @@ def find_lexer_class_for_filename(_fn, code=None):
"""
matches = []
fn = basename(_fn)
- for modname, name, _, filenames, _ in LEXERS.values():
+ for modname, name, _, filenames, _ in LEXERS.values():
for filename in filenames:
if _fn_matches(fn, filename):
if name not in _lexer_cache:
@@ -174,7 +174,7 @@ def find_lexer_class_for_filename(_fn, code=None):
if _fn_matches(fn, filename):
matches.append((cls, filename))
- if isinstance(code, bytes):
+ if isinstance(code, bytes):
# decode it, since all analyse_text functions expect unicode
code = guess_decode(code)
@@ -215,7 +215,7 @@ def get_lexer_for_mimetype(_mime, **options):
Raises ClassNotFound if not found.
"""
- for modname, name, _, _, mimetypes in LEXERS.values():
+ for modname, name, _, _, mimetypes in LEXERS.values():
if _mime in mimetypes:
if name not in _lexer_cache:
_load_lexers(modname)
@@ -234,7 +234,7 @@ def _iter_lexerclasses(plugins=True):
_load_lexers(module_name)
yield _lexer_cache[name]
if plugins:
- yield from find_plugin_lexers()
+ yield from find_plugin_lexers()
def guess_lexer_for_filename(_fn, _text, **options):
@@ -291,7 +291,7 @@ def guess_lexer_for_filename(_fn, _text, **options):
def guess_lexer(_text, **options):
"""Guess a lexer by strong distinctions in the text (eg, shebang)."""
- if not isinstance(_text, str):
+ if not isinstance(_text, str):
inencoding = options.get('inencoding', options.get('encoding'))
if inencoding:
_text = _text.decode(inencoding or 'utf8')
@@ -329,8 +329,8 @@ class _automodule(types.ModuleType):
cls = _lexer_cache[info[1]]
setattr(self, name, cls)
return cls
- if name in COMPAT:
- return getattr(self, COMPAT[name])
+ if name in COMPAT:
+ return getattr(self, COMPAT[name])
raise AttributeError(name)
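The lookup helpers in pygments/lexers/__init__.py shown above are the public entry points for resolving lexers, and the COMPAT table keeps the retired Python3Lexer/Python3TracebackLexer attribute names importable through the module-level __getattr__. A short usage sketch (the aliases, filenames and sample snippet are illustrative):

    from pygments.lexers import (get_lexer_by_name, get_lexer_for_filename,
                                 guess_lexer)

    lexer = get_lexer_by_name('python', stripall=True)            # by alias
    lexer = get_lexer_for_filename('setup.py')                    # by filename pattern
    lexer = guess_lexer('#!/usr/bin/env python\nprint("hi")\n')   # by content

    # Old attribute names still resolve via COMPAT:
    from pygments.lexers import Python3Lexer   # same class as PythonLexer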
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
index 74e057ef40..a83d34fae0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
@@ -9,11 +9,11 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-ASYFUNCNAME = {
+ASYFUNCNAME = {
'AND',
'Arc',
'ArcArrow',
@@ -1037,9 +1037,9 @@ ASYFUNCNAME = {
'ztick',
'ztick3',
'ztrans'
-}
+}
-ASYVARNAME = {
+ASYVARNAME = {
'AliceBlue',
'Align',
'Allow',
@@ -1641,4 +1641,4 @@ ASYVARNAME = {
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
-}
+}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
index 08d70e1549..ed23f7883f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
@@ -4,11 +4,11 @@
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-BUILTIN_FUNCTIONS = { # 638 functions
+BUILTIN_FUNCTIONS = { # 638 functions
'<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
'adjustable-array-p', 'adjust-array', 'allocate-instance',
@@ -156,17 +156,17 @@ BUILTIN_FUNCTIONS = { # 638 functions
'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
'y-or-n-p', 'zerop',
-}
+}
-SPECIAL_FORMS = {
+SPECIAL_FORMS = {
'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
'unwind-protect',
-}
+}
-MACROS = {
+MACROS = {
'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
'define-compiler-macro', 'define-condition', 'define-method-combination',
@@ -187,19 +187,19 @@ MACROS = {
'with-input-from-string', 'with-open-file', 'with-open-stream',
'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
'with-slots', 'with-standard-io-syntax',
-}
+}
-LAMBDA_LIST_KEYWORDS = {
+LAMBDA_LIST_KEYWORDS = {
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
-}
+}
-DECLARATIONS = {
+DECLARATIONS = {
'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
'ignorable', 'notinline', 'type',
-}
+}
-BUILTIN_TYPES = {
+BUILTIN_TYPES = {
'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
@@ -216,9 +216,9 @@ BUILTIN_TYPES = {
'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
'undefined-function', 'warning',
-}
+}
-BUILTIN_CLASSES = {
+BUILTIN_CLASSES = {
'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
'file-stream', 'float', 'function', 'generic-function', 'hash-table',
@@ -228,4 +228,4 @@ BUILTIN_CLASSES = {
'standard-generic-function', 'standard-method', 'standard-object',
'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-}
+}
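The sets in _cl_builtins.py are meant to be consumed by the Common Lisp lexer via simple membership tests when it refines name tokens. A minimal, illustrative sketch of that idea (the token-type choices below are this sketch's own, not necessarily the ones the real lexer uses):

    from pygments.lexers._cl_builtins import (BUILTIN_FUNCTIONS, SPECIAL_FORMS,
                                              MACROS)
    from pygments.token import Keyword, Name

    def classify(symbol):
        # Map a Lisp symbol to a token type by set membership.
        if symbol in SPECIAL_FORMS:
            return Keyword
        if symbol in MACROS:
            return Name.Builtin       # illustrative choice
        if symbol in BUILTIN_FUNCTIONS:
            return Name.Function      # illustrative choice
        return Name.Variable

    print(classify('let'), classify('defclass'), classify('zerop'))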
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
index 72d86db1e7..253d43bd39 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
@@ -7,19 +7,19 @@
    The file may also be used as a standalone generator for the above.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-COCOA_INTERFACES = {'AAAttribution', 'ABNewPersonViewController', 'ABPeoplePickerNavigationController', 'ABPersonViewController', 'ABUnknownPersonViewController', 'ACAccount', 'ACAccountCredential', 'ACAccountStore', 'ACAccountType', 'ADBannerView', 'ADClient', 'ADInterstitialAd', 'ADInterstitialAdPresentationViewController', 'AEAssessmentConfiguration', 'AEAssessmentSession', 'ALAsset', 'ALAssetRepresentation', 'ALAssetsFilter', 'ALAssetsGroup', 'ALAssetsLibrary', 'APActivationPayload', 'ARAnchor', 'ARAppClipCodeAnchor', 'ARBody2D', 'ARBodyAnchor', 'ARBodyTrackingConfiguration', 'ARCamera', 'ARCoachingOverlayView', 'ARCollaborationData', 'ARConfiguration', 'ARDepthData', 'ARDirectionalLightEstimate', 'AREnvironmentProbeAnchor', 'ARFaceAnchor', 'ARFaceGeometry', 'ARFaceTrackingConfiguration', 'ARFrame', 'ARGeoAnchor', 'ARGeoTrackingConfiguration', 'ARGeoTrackingStatus', 'ARGeometryElement', 'ARGeometrySource', 'ARHitTestResult', 'ARImageAnchor', 'ARImageTrackingConfiguration', 'ARLightEstimate', 'ARMatteGenerator', 'ARMeshAnchor', 'ARMeshGeometry', 'ARObjectAnchor', 'ARObjectScanningConfiguration', 'AROrientationTrackingConfiguration', 'ARParticipantAnchor', 'ARPlaneAnchor', 'ARPlaneGeometry', 'ARPointCloud', 'ARPositionalTrackingConfiguration', 'ARQuickLookPreviewItem', 'ARRaycastQuery', 'ARRaycastResult', 'ARReferenceImage', 'ARReferenceObject', 'ARSCNFaceGeometry', 'ARSCNPlaneGeometry', 'ARSCNView', 'ARSKView', 'ARSession', 'ARSkeleton', 'ARSkeleton2D', 'ARSkeleton3D', 'ARSkeletonDefinition', 'ARTrackedRaycast', 'ARVideoFormat', 'ARView', 'ARWorldMap', 'ARWorldTrackingConfiguration', 'ASAccountAuthenticationModificationController', 'ASAccountAuthenticationModificationExtensionContext', 'ASAccountAuthenticationModificationReplacePasswordWithSignInWithAppleRequest', 'ASAccountAuthenticationModificationRequest', 'ASAccountAuthenticationModificationUpgradePasswordToStrongPasswordRequest', 'ASAccountAuthenticationModificationViewController', 'ASAuthorization', 'ASAuthorizationAppleIDButton', 'ASAuthorizationAppleIDCredential', 'ASAuthorizationAppleIDProvider', 'ASAuthorizationAppleIDRequest', 'ASAuthorizationController', 'ASAuthorizationOpenIDRequest', 'ASAuthorizationPasswordProvider', 'ASAuthorizationPasswordRequest', 'ASAuthorizationProviderExtensionAuthorizationRequest', 'ASAuthorizationRequest', 'ASAuthorizationSingleSignOnCredential', 'ASAuthorizationSingleSignOnProvider', 'ASAuthorizationSingleSignOnRequest', 'ASCredentialIdentityStore', 'ASCredentialIdentityStoreState', 'ASCredentialProviderExtensionContext', 'ASCredentialProviderViewController', 'ASCredentialServiceIdentifier', 'ASIdentifierManager', 'ASPasswordCredential', 'ASPasswordCredentialIdentity', 'ASWebAuthenticationSession', 'ASWebAuthenticationSessionRequest', 'ASWebAuthenticationSessionWebBrowserSessionManager', 'ATTrackingManager', 'AUAudioUnit', 'AUAudioUnitBus', 'AUAudioUnitBusArray', 'AUAudioUnitPreset', 'AUAudioUnitV2Bridge', 'AUAudioUnitViewConfiguration', 'AUParameter', 'AUParameterGroup', 'AUParameterNode', 'AUParameterTree', 'AUViewController', 'AVAggregateAssetDownloadTask', 'AVAsset', 'AVAssetCache', 'AVAssetDownloadStorageManagementPolicy', 'AVAssetDownloadStorageManager', 'AVAssetDownloadTask', 'AVAssetDownloadURLSession', 'AVAssetExportSession', 'AVAssetImageGenerator', 'AVAssetReader', 'AVAssetReaderAudioMixOutput', 'AVAssetReaderOutput', 'AVAssetReaderOutputMetadataAdaptor', 'AVAssetReaderSampleReferenceOutput', 'AVAssetReaderTrackOutput', 'AVAssetReaderVideoCompositionOutput', 'AVAssetResourceLoader', 
'AVAssetResourceLoadingContentInformationRequest', 'AVAssetResourceLoadingDataRequest', 'AVAssetResourceLoadingRequest', 'AVAssetResourceLoadingRequestor', 'AVAssetResourceRenewalRequest', 'AVAssetSegmentReport', 'AVAssetSegmentReportSampleInformation', 'AVAssetSegmentTrackReport', 'AVAssetTrack', 'AVAssetTrackGroup', 'AVAssetTrackSegment', 'AVAssetWriter', 'AVAssetWriterInput', 'AVAssetWriterInputGroup', 'AVAssetWriterInputMetadataAdaptor', 'AVAssetWriterInputPassDescription', 'AVAssetWriterInputPixelBufferAdaptor', 'AVAsynchronousCIImageFilteringRequest', 'AVAsynchronousVideoCompositionRequest', 'AVAudioMix', 'AVAudioMixInputParameters', 'AVAudioSession', 'AVCameraCalibrationData', 'AVCaptureAudioChannel', 'AVCaptureAudioDataOutput', 'AVCaptureAudioFileOutput', 'AVCaptureAudioPreviewOutput', 'AVCaptureAutoExposureBracketedStillImageSettings', 'AVCaptureBracketedStillImageSettings', 'AVCaptureConnection', 'AVCaptureDataOutputSynchronizer', 'AVCaptureDepthDataOutput', 'AVCaptureDevice', 'AVCaptureDeviceDiscoverySession', 'AVCaptureDeviceFormat', 'AVCaptureDeviceInput', 'AVCaptureDeviceInputSource', 'AVCaptureFileOutput', 'AVCaptureInput', 'AVCaptureInputPort', 'AVCaptureManualExposureBracketedStillImageSettings', 'AVCaptureMetadataInput', 'AVCaptureMetadataOutput', 'AVCaptureMovieFileOutput', 'AVCaptureMultiCamSession', 'AVCaptureOutput', 'AVCapturePhoto', 'AVCapturePhotoBracketSettings', 'AVCapturePhotoOutput', 'AVCapturePhotoSettings', 'AVCaptureResolvedPhotoSettings', 'AVCaptureScreenInput', 'AVCaptureSession', 'AVCaptureStillImageOutput', 'AVCaptureSynchronizedData', 'AVCaptureSynchronizedDataCollection', 'AVCaptureSynchronizedDepthData', 'AVCaptureSynchronizedMetadataObjectData', 'AVCaptureSynchronizedSampleBufferData', 'AVCaptureSystemPressureState', 'AVCaptureVideoDataOutput', 'AVCaptureVideoPreviewLayer', 'AVComposition', 'AVCompositionTrack', 'AVCompositionTrackFormatDescriptionReplacement', 'AVCompositionTrackSegment', 'AVContentKeyRequest', 'AVContentKeyResponse', 'AVContentKeySession', 'AVDateRangeMetadataGroup', 'AVDepthData', 'AVDisplayCriteria', 'AVFragmentedAsset', 'AVFragmentedAssetMinder', 'AVFragmentedAssetTrack', 'AVFragmentedMovie', 'AVFragmentedMovieMinder', 'AVFragmentedMovieTrack', 'AVFrameRateRange', 'AVMediaDataStorage', 'AVMediaSelection', 'AVMediaSelectionGroup', 'AVMediaSelectionOption', 'AVMetadataBodyObject', 'AVMetadataCatBodyObject', 'AVMetadataDogBodyObject', 'AVMetadataFaceObject', 'AVMetadataGroup', 'AVMetadataHumanBodyObject', 'AVMetadataItem', 'AVMetadataItemFilter', 'AVMetadataItemValueRequest', 'AVMetadataMachineReadableCodeObject', 'AVMetadataObject', 'AVMetadataSalientObject', 'AVMovie', 'AVMovieTrack', 'AVMutableAssetDownloadStorageManagementPolicy', 'AVMutableAudioMix', 'AVMutableAudioMixInputParameters', 'AVMutableComposition', 'AVMutableCompositionTrack', 'AVMutableDateRangeMetadataGroup', 'AVMutableMediaSelection', 'AVMutableMetadataItem', 'AVMutableMovie', 'AVMutableMovieTrack', 'AVMutableTimedMetadataGroup', 'AVMutableVideoComposition', 'AVMutableVideoCompositionInstruction', 'AVMutableVideoCompositionLayerInstruction', 'AVOutputSettingsAssistant', 'AVPersistableContentKeyRequest', 'AVPictureInPictureController', 'AVPlayer', 'AVPlayerItem', 'AVPlayerItemAccessLog', 'AVPlayerItemAccessLogEvent', 'AVPlayerItemErrorLog', 'AVPlayerItemErrorLogEvent', 'AVPlayerItemLegibleOutput', 'AVPlayerItemMediaDataCollector', 'AVPlayerItemMetadataCollector', 'AVPlayerItemMetadataOutput', 'AVPlayerItemOutput', 'AVPlayerItemTrack', 'AVPlayerItemVideoOutput', 
'AVPlayerLayer', 'AVPlayerLooper', 'AVPlayerMediaSelectionCriteria', 'AVPlayerViewController', 'AVPortraitEffectsMatte', 'AVQueuePlayer', 'AVRouteDetector', 'AVRoutePickerView', 'AVSampleBufferAudioRenderer', 'AVSampleBufferDisplayLayer', 'AVSampleBufferRenderSynchronizer', 'AVSemanticSegmentationMatte', 'AVSynchronizedLayer', 'AVTextStyleRule', 'AVTimedMetadataGroup', 'AVURLAsset', 'AVVideoComposition', 'AVVideoCompositionCoreAnimationTool', 'AVVideoCompositionInstruction', 'AVVideoCompositionLayerInstruction', 'AVVideoCompositionRenderContext', 'AVVideoCompositionRenderHint', 'AXCustomContent', 'BCChatAction', 'BCChatButton', 'BGAppRefreshTask', 'BGAppRefreshTaskRequest', 'BGProcessingTask', 'BGProcessingTaskRequest', 'BGTask', 'BGTaskRequest', 'BGTaskScheduler', 'CAAnimation', 'CAAnimationGroup', 'CABTMIDICentralViewController', 'CABTMIDILocalPeripheralViewController', 'CABasicAnimation', 'CADisplayLink', 'CAEAGLLayer', 'CAEmitterCell', 'CAEmitterLayer', 'CAGradientLayer', 'CAInterAppAudioSwitcherView', 'CAInterAppAudioTransportView', 'CAKeyframeAnimation', 'CALayer', 'CAMediaTimingFunction', 'CAMetalLayer', 'CAPropertyAnimation', 'CAReplicatorLayer', 'CAScrollLayer', 'CAShapeLayer', 'CASpringAnimation', 'CATextLayer', 'CATiledLayer', 'CATransaction', 'CATransformLayer', 'CATransition', 'CAValueFunction', 'CBATTRequest', 'CBAttribute', 'CBCentral', 'CBCentralManager', 'CBCharacteristic', 'CBDescriptor', 'CBL2CAPChannel', 'CBManager', 'CBMutableCharacteristic', 'CBMutableDescriptor', 'CBMutableService', 'CBPeer', 'CBPeripheral', 'CBPeripheralManager', 'CBService', 'CBUUID', 'CHHapticDynamicParameter', 'CHHapticEngine', 'CHHapticEvent', 'CHHapticEventParameter', 'CHHapticParameterCurve', 'CHHapticParameterCurveControlPoint', 'CHHapticPattern', 'CIAztecCodeDescriptor', 'CIBarcodeDescriptor', 'CIBlendKernel', 'CIColor', 'CIColorKernel', 'CIContext', 'CIDataMatrixCodeDescriptor', 'CIDetector', 'CIFaceFeature', 'CIFeature', 'CIFilter', 'CIFilterGenerator', 'CIFilterShape', 'CIImage', 'CIImageAccumulator', 'CIImageProcessorKernel', 'CIKernel', 'CIPDF417CodeDescriptor', 'CIPlugIn', 'CIQRCodeDescriptor', 'CIQRCodeFeature', 'CIRectangleFeature', 'CIRenderDestination', 'CIRenderInfo', 'CIRenderTask', 'CISampler', 'CITextFeature', 'CIVector', 'CIWarpKernel', 'CKAcceptSharesOperation', 'CKAsset', 'CKContainer', 'CKDatabase', 'CKDatabaseNotification', 'CKDatabaseOperation', 'CKDatabaseSubscription', 'CKDiscoverAllUserIdentitiesOperation', 'CKDiscoverUserIdentitiesOperation', 'CKFetchDatabaseChangesOperation', 'CKFetchNotificationChangesOperation', 'CKFetchRecordChangesOperation', 'CKFetchRecordZoneChangesConfiguration', 'CKFetchRecordZoneChangesOperation', 'CKFetchRecordZoneChangesOptions', 'CKFetchRecordZonesOperation', 'CKFetchRecordsOperation', 'CKFetchShareMetadataOperation', 'CKFetchShareParticipantsOperation', 'CKFetchSubscriptionsOperation', 'CKFetchWebAuthTokenOperation', 'CKLocationSortDescriptor', 'CKMarkNotificationsReadOperation', 'CKModifyBadgeOperation', 'CKModifyRecordZonesOperation', 'CKModifyRecordsOperation', 'CKModifySubscriptionsOperation', 'CKNotification', 'CKNotificationID', 'CKNotificationInfo', 'CKOperation', 'CKOperationConfiguration', 'CKOperationGroup', 'CKQuery', 'CKQueryCursor', 'CKQueryNotification', 'CKQueryOperation', 'CKQuerySubscription', 'CKRecord', 'CKRecordID', 'CKRecordZone', 'CKRecordZoneID', 'CKRecordZoneNotification', 'CKRecordZoneSubscription', 'CKReference', 'CKServerChangeToken', 'CKShare', 'CKShareMetadata', 'CKShareParticipant', 'CKSubscription', 
'CKUserIdentity', 'CKUserIdentityLookupInfo', 'CLBeacon', 'CLBeaconIdentityConstraint', 'CLBeaconRegion', 'CLCircularRegion', 'CLFloor', 'CLGeocoder', 'CLHeading', 'CLKComplication', 'CLKComplicationDescriptor', 'CLKComplicationServer', 'CLKComplicationTemplate', 'CLKComplicationTemplateCircularSmallRingImage', 'CLKComplicationTemplateCircularSmallRingText', 'CLKComplicationTemplateCircularSmallSimpleImage', 'CLKComplicationTemplateCircularSmallSimpleText', 'CLKComplicationTemplateCircularSmallStackImage', 'CLKComplicationTemplateCircularSmallStackText', 'CLKComplicationTemplateExtraLargeColumnsText', 'CLKComplicationTemplateExtraLargeRingImage', 'CLKComplicationTemplateExtraLargeRingText', 'CLKComplicationTemplateExtraLargeSimpleImage', 'CLKComplicationTemplateExtraLargeSimpleText', 'CLKComplicationTemplateExtraLargeStackImage', 'CLKComplicationTemplateExtraLargeStackText', 'CLKComplicationTemplateGraphicBezelCircularText', 'CLKComplicationTemplateGraphicCircular', 'CLKComplicationTemplateGraphicCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicCircularClosedGaugeText', 'CLKComplicationTemplateGraphicCircularImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicCircularStackImage', 'CLKComplicationTemplateGraphicCircularStackText', 'CLKComplicationTemplateGraphicCornerCircularImage', 'CLKComplicationTemplateGraphicCornerGaugeImage', 'CLKComplicationTemplateGraphicCornerGaugeText', 'CLKComplicationTemplateGraphicCornerStackText', 'CLKComplicationTemplateGraphicCornerTextImage', 'CLKComplicationTemplateGraphicExtraLargeCircular', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeText', 'CLKComplicationTemplateGraphicExtraLargeCircularImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicExtraLargeCircularStackImage', 'CLKComplicationTemplateGraphicExtraLargeCircularStackText', 'CLKComplicationTemplateGraphicRectangularFullImage', 'CLKComplicationTemplateGraphicRectangularLargeImage', 'CLKComplicationTemplateGraphicRectangularStandardBody', 'CLKComplicationTemplateGraphicRectangularTextGauge', 'CLKComplicationTemplateModularLargeColumns', 'CLKComplicationTemplateModularLargeStandardBody', 'CLKComplicationTemplateModularLargeTable', 'CLKComplicationTemplateModularLargeTallBody', 'CLKComplicationTemplateModularSmallColumnsText', 'CLKComplicationTemplateModularSmallRingImage', 'CLKComplicationTemplateModularSmallRingText', 'CLKComplicationTemplateModularSmallSimpleImage', 'CLKComplicationTemplateModularSmallSimpleText', 'CLKComplicationTemplateModularSmallStackImage', 'CLKComplicationTemplateModularSmallStackText', 'CLKComplicationTemplateUtilitarianLargeFlat', 'CLKComplicationTemplateUtilitarianSmallFlat', 'CLKComplicationTemplateUtilitarianSmallRingImage', 'CLKComplicationTemplateUtilitarianSmallRingText', 'CLKComplicationTemplateUtilitarianSmallSquare', 'CLKComplicationTimelineEntry', 'CLKDateTextProvider', 'CLKFullColorImageProvider', 'CLKGaugeProvider', 'CLKImageProvider', 'CLKRelativeDateTextProvider', 'CLKSimpleGaugeProvider', 'CLKSimpleTextProvider', 'CLKTextProvider', 'CLKTimeIntervalGaugeProvider', 'CLKTimeIntervalTextProvider', 'CLKTimeTextProvider', 
'CLKWatchFaceLibrary', 'CLLocation', 'CLLocationManager', 'CLPlacemark', 'CLRegion', 'CLSActivity', 'CLSActivityItem', 'CLSBinaryItem', 'CLSContext', 'CLSDataStore', 'CLSObject', 'CLSProgressReportingCapability', 'CLSQuantityItem', 'CLSScoreItem', 'CLVisit', 'CMAccelerometerData', 'CMAltimeter', 'CMAltitudeData', 'CMAttitude', 'CMDeviceMotion', 'CMDyskineticSymptomResult', 'CMFallDetectionEvent', 'CMFallDetectionManager', 'CMGyroData', 'CMHeadphoneMotionManager', 'CMLogItem', 'CMMagnetometerData', 'CMMotionActivity', 'CMMotionActivityManager', 'CMMotionManager', 'CMMovementDisorderManager', 'CMPedometer', 'CMPedometerData', 'CMPedometerEvent', 'CMRecordedAccelerometerData', 'CMRecordedRotationRateData', 'CMRotationRateData', 'CMSensorDataList', 'CMSensorRecorder', 'CMStepCounter', 'CMTremorResult', 'CNChangeHistoryAddContactEvent', 'CNChangeHistoryAddGroupEvent', 'CNChangeHistoryAddMemberToGroupEvent', 'CNChangeHistoryAddSubgroupToGroupEvent', 'CNChangeHistoryDeleteContactEvent', 'CNChangeHistoryDeleteGroupEvent', 'CNChangeHistoryDropEverythingEvent', 'CNChangeHistoryEvent', 'CNChangeHistoryFetchRequest', 'CNChangeHistoryRemoveMemberFromGroupEvent', 'CNChangeHistoryRemoveSubgroupFromGroupEvent', 'CNChangeHistoryUpdateContactEvent', 'CNChangeHistoryUpdateGroupEvent', 'CNContact', 'CNContactFetchRequest', 'CNContactFormatter', 'CNContactPickerViewController', 'CNContactProperty', 'CNContactRelation', 'CNContactStore', 'CNContactVCardSerialization', 'CNContactViewController', 'CNContactsUserDefaults', 'CNContainer', 'CNFetchRequest', 'CNFetchResult', 'CNGroup', 'CNInstantMessageAddress', 'CNLabeledValue', 'CNMutableContact', 'CNMutableGroup', 'CNMutablePostalAddress', 'CNPhoneNumber', 'CNPostalAddress', 'CNPostalAddressFormatter', 'CNSaveRequest', 'CNSocialProfile', 'CPActionSheetTemplate', 'CPAlertAction', 'CPAlertTemplate', 'CPBarButton', 'CPButton', 'CPContact', 'CPContactCallButton', 'CPContactDirectionsButton', 'CPContactMessageButton', 'CPContactTemplate', 'CPDashboardButton', 'CPDashboardController', 'CPGridButton', 'CPGridTemplate', 'CPImageSet', 'CPInformationItem', 'CPInformationRatingItem', 'CPInformationTemplate', 'CPInterfaceController', 'CPListImageRowItem', 'CPListItem', 'CPListSection', 'CPListTemplate', 'CPManeuver', 'CPMapButton', 'CPMapTemplate', 'CPMessageComposeBarButton', 'CPMessageListItem', 'CPMessageListItemLeadingConfiguration', 'CPMessageListItemTrailingConfiguration', 'CPNavigationAlert', 'CPNavigationSession', 'CPNowPlayingAddToLibraryButton', 'CPNowPlayingButton', 'CPNowPlayingImageButton', 'CPNowPlayingMoreButton', 'CPNowPlayingPlaybackRateButton', 'CPNowPlayingRepeatButton', 'CPNowPlayingShuffleButton', 'CPNowPlayingTemplate', 'CPPointOfInterest', 'CPPointOfInterestTemplate', 'CPRouteChoice', 'CPSearchTemplate', 'CPSessionConfiguration', 'CPTabBarTemplate', 'CPTemplate', 'CPTemplateApplicationDashboardScene', 'CPTemplateApplicationScene', 'CPTextButton', 'CPTravelEstimates', 'CPTrip', 'CPTripPreviewTextConfiguration', 'CPVoiceControlState', 'CPVoiceControlTemplate', 'CPWindow', 'CSCustomAttributeKey', 'CSIndexExtensionRequestHandler', 'CSLocalizedString', 'CSPerson', 'CSSearchQuery', 'CSSearchableIndex', 'CSSearchableItem', 'CSSearchableItemAttributeSet', 'CTCall', 'CTCallCenter', 'CTCarrier', 'CTCellularData', 'CTCellularPlanProvisioning', 'CTCellularPlanProvisioningRequest', 'CTSubscriber', 'CTSubscriberInfo', 'CTTelephonyNetworkInfo', 'CXAction', 'CXAnswerCallAction', 'CXCall', 'CXCallAction', 'CXCallController', 'CXCallDirectoryExtensionContext', 
'CXCallDirectoryManager', 'CXCallDirectoryProvider', 'CXCallObserver', 'CXCallUpdate', 'CXEndCallAction', 'CXHandle', 'CXPlayDTMFCallAction', 'CXProvider', 'CXProviderConfiguration', 'CXSetGroupCallAction', 'CXSetHeldCallAction', 'CXSetMutedCallAction', 'CXStartCallAction', 'CXTransaction', 'DCAppAttestService', 'DCDevice', 'EAAccessory', 'EAAccessoryManager', 'EAGLContext', 'EAGLSharegroup', 'EASession', 'EAWiFiUnconfiguredAccessory', 'EAWiFiUnconfiguredAccessoryBrowser', 'EKAlarm', 'EKCalendar', 'EKCalendarChooser', 'EKCalendarItem', 'EKEvent', 'EKEventEditViewController', 'EKEventStore', 'EKEventViewController', 'EKObject', 'EKParticipant', 'EKRecurrenceDayOfWeek', 'EKRecurrenceEnd', 'EKRecurrenceRule', 'EKReminder', 'EKSource', 'EKStructuredLocation', 'ENExposureConfiguration', 'ENExposureDaySummary', 'ENExposureDetectionSummary', 'ENExposureInfo', 'ENExposureSummaryItem', 'ENExposureWindow', 'ENManager', 'ENScanInstance', 'ENTemporaryExposureKey', 'EntityRotationGestureRecognizer', 'EntityScaleGestureRecognizer', 'EntityTranslationGestureRecognizer', 'FPUIActionExtensionContext', 'FPUIActionExtensionViewController', 'GCColor', 'GCController', 'GCControllerAxisInput', 'GCControllerButtonInput', 'GCControllerDirectionPad', 'GCControllerElement', 'GCControllerTouchpad', 'GCDeviceBattery', 'GCDeviceCursor', 'GCDeviceHaptics', 'GCDeviceLight', 'GCDirectionalGamepad', 'GCDualShockGamepad', 'GCEventViewController', 'GCExtendedGamepad', 'GCExtendedGamepadSnapshot', 'GCGamepad', 'GCGamepadSnapshot', 'GCKeyboard', 'GCKeyboardInput', 'GCMicroGamepad', 'GCMicroGamepadSnapshot', 'GCMotion', 'GCMouse', 'GCMouseInput', 'GCPhysicalInputProfile', 'GCXboxGamepad', 'GKARC4RandomSource', 'GKAccessPoint', 'GKAchievement', 'GKAchievementChallenge', 'GKAchievementDescription', 'GKAchievementViewController', 'GKAgent', 'GKAgent2D', 'GKAgent3D', 'GKBasePlayer', 'GKBehavior', 'GKBillowNoiseSource', 'GKChallenge', 'GKChallengeEventHandler', 'GKCheckerboardNoiseSource', 'GKCircleObstacle', 'GKCloudPlayer', 'GKCoherentNoiseSource', 'GKComponent', 'GKComponentSystem', 'GKCompositeBehavior', 'GKConstantNoiseSource', 'GKCylindersNoiseSource', 'GKDecisionNode', 'GKDecisionTree', 'GKEntity', 'GKFriendRequestComposeViewController', 'GKGameCenterViewController', 'GKGameSession', 'GKGameSessionSharingViewController', 'GKGaussianDistribution', 'GKGoal', 'GKGraph', 'GKGraphNode', 'GKGraphNode2D', 'GKGraphNode3D', 'GKGridGraph', 'GKGridGraphNode', 'GKInvite', 'GKLeaderboard', 'GKLeaderboardEntry', 'GKLeaderboardScore', 'GKLeaderboardSet', 'GKLeaderboardViewController', 'GKLinearCongruentialRandomSource', 'GKLocalPlayer', 'GKMatch', 'GKMatchRequest', 'GKMatchmaker', 'GKMatchmakerViewController', 'GKMersenneTwisterRandomSource', 'GKMeshGraph', 'GKMinmaxStrategist', 'GKMonteCarloStrategist', 'GKNSPredicateRule', 'GKNoise', 'GKNoiseMap', 'GKNoiseSource', 'GKNotificationBanner', 'GKObstacle', 'GKObstacleGraph', 'GKOctree', 'GKOctreeNode', 'GKPath', 'GKPeerPickerController', 'GKPerlinNoiseSource', 'GKPlayer', 'GKPolygonObstacle', 'GKQuadtree', 'GKQuadtreeNode', 'GKRTree', 'GKRandomDistribution', 'GKRandomSource', 'GKRidgedNoiseSource', 'GKRule', 'GKRuleSystem', 'GKSCNNodeComponent', 'GKSKNodeComponent', 'GKSavedGame', 'GKScene', 'GKScore', 'GKScoreChallenge', 'GKSession', 'GKShuffledDistribution', 'GKSphereObstacle', 'GKSpheresNoiseSource', 'GKState', 'GKStateMachine', 'GKTurnBasedEventHandler', 'GKTurnBasedExchangeReply', 'GKTurnBasedMatch', 'GKTurnBasedMatchmakerViewController', 'GKTurnBasedParticipant', 'GKVoiceChat', 
'GKVoiceChatService', 'GKVoronoiNoiseSource', 'GLKBaseEffect', 'GLKEffectProperty', 'GLKEffectPropertyFog', 'GLKEffectPropertyLight', 'GLKEffectPropertyMaterial', 'GLKEffectPropertyTexture', 'GLKEffectPropertyTransform', 'GLKMesh', 'GLKMeshBuffer', 'GLKMeshBufferAllocator', 'GLKReflectionMapEffect', 'GLKSkyboxEffect', 'GLKSubmesh', 'GLKTextureInfo', 'GLKTextureLoader', 'GLKView', 'GLKViewController', 'HKActivityMoveModeObject', 'HKActivityRingView', 'HKActivitySummary', 'HKActivitySummaryQuery', 'HKActivitySummaryType', 'HKAnchoredObjectQuery', 'HKAudiogramSample', 'HKAudiogramSampleType', 'HKAudiogramSensitivityPoint', 'HKBiologicalSexObject', 'HKBloodTypeObject', 'HKCDADocument', 'HKCDADocumentSample', 'HKCategorySample', 'HKCategoryType', 'HKCharacteristicType', 'HKClinicalRecord', 'HKClinicalType', 'HKCorrelation', 'HKCorrelationQuery', 'HKCorrelationType', 'HKCumulativeQuantitySample', 'HKCumulativeQuantitySeriesSample', 'HKDeletedObject', 'HKDevice', 'HKDiscreteQuantitySample', 'HKDocumentQuery', 'HKDocumentSample', 'HKDocumentType', 'HKElectrocardiogram', 'HKElectrocardiogramQuery', 'HKElectrocardiogramType', 'HKElectrocardiogramVoltageMeasurement', 'HKFHIRResource', 'HKFHIRVersion', 'HKFitzpatrickSkinTypeObject', 'HKHealthStore', 'HKHeartbeatSeriesBuilder', 'HKHeartbeatSeriesQuery', 'HKHeartbeatSeriesSample', 'HKLiveWorkoutBuilder', 'HKLiveWorkoutDataSource', 'HKObject', 'HKObjectType', 'HKObserverQuery', 'HKQuantity', 'HKQuantitySample', 'HKQuantitySeriesSampleBuilder', 'HKQuantitySeriesSampleQuery', 'HKQuantityType', 'HKQuery', 'HKQueryAnchor', 'HKSample', 'HKSampleQuery', 'HKSampleType', 'HKSeriesBuilder', 'HKSeriesSample', 'HKSeriesType', 'HKSource', 'HKSourceQuery', 'HKSourceRevision', 'HKStatistics', 'HKStatisticsCollection', 'HKStatisticsCollectionQuery', 'HKStatisticsQuery', 'HKUnit', 'HKWheelchairUseObject', 'HKWorkout', 'HKWorkoutBuilder', 'HKWorkoutConfiguration', 'HKWorkoutEvent', 'HKWorkoutRoute', 'HKWorkoutRouteBuilder', 'HKWorkoutRouteQuery', 'HKWorkoutSession', 'HKWorkoutType', 'HMAccessControl', 'HMAccessory', 'HMAccessoryBrowser', 'HMAccessoryCategory', 'HMAccessoryOwnershipToken', 'HMAccessoryProfile', 'HMAccessorySetupPayload', 'HMAction', 'HMActionSet', 'HMAddAccessoryRequest', 'HMCalendarEvent', 'HMCameraAudioControl', 'HMCameraControl', 'HMCameraProfile', 'HMCameraSettingsControl', 'HMCameraSnapshot', 'HMCameraSnapshotControl', 'HMCameraSource', 'HMCameraStream', 'HMCameraStreamControl', 'HMCameraView', 'HMCharacteristic', 'HMCharacteristicEvent', 'HMCharacteristicMetadata', 'HMCharacteristicThresholdRangeEvent', 'HMCharacteristicWriteAction', 'HMDurationEvent', 'HMEvent', 'HMEventTrigger', 'HMHome', 'HMHomeAccessControl', 'HMHomeManager', 'HMLocationEvent', 'HMMutableCalendarEvent', 'HMMutableCharacteristicEvent', 'HMMutableCharacteristicThresholdRangeEvent', 'HMMutableDurationEvent', 'HMMutableLocationEvent', 'HMMutablePresenceEvent', 'HMMutableSignificantTimeEvent', 'HMNetworkConfigurationProfile', 'HMNumberRange', 'HMPresenceEvent', 'HMRoom', 'HMService', 'HMServiceGroup', 'HMSignificantTimeEvent', 'HMTimeEvent', 'HMTimerTrigger', 'HMTrigger', 'HMUser', 'HMZone', 'ICCameraDevice', 'ICCameraFile', 'ICCameraFolder', 'ICCameraItem', 'ICDevice', 'ICDeviceBrowser', 'ICScannerBandData', 'ICScannerDevice', 'ICScannerFeature', 'ICScannerFeatureBoolean', 'ICScannerFeatureEnumeration', 'ICScannerFeatureRange', 'ICScannerFeatureTemplate', 'ICScannerFunctionalUnit', 'ICScannerFunctionalUnitDocumentFeeder', 'ICScannerFunctionalUnitFlatbed', 
'ICScannerFunctionalUnitNegativeTransparency', 'ICScannerFunctionalUnitPositiveTransparency', 'ILCallClassificationRequest', 'ILCallCommunication', 'ILClassificationRequest', 'ILClassificationResponse', 'ILClassificationUIExtensionContext', 'ILClassificationUIExtensionViewController', 'ILCommunication', 'ILMessageClassificationRequest', 'ILMessageCommunication', 'ILMessageFilterExtension', 'ILMessageFilterExtensionContext', 'ILMessageFilterQueryRequest', 'ILMessageFilterQueryResponse', 'ILNetworkResponse', 'INAccountTypeResolutionResult', 'INActivateCarSignalIntent', 'INActivateCarSignalIntentResponse', 'INAddMediaIntent', 'INAddMediaIntentResponse', 'INAddMediaMediaDestinationResolutionResult', 'INAddMediaMediaItemResolutionResult', 'INAddTasksIntent', 'INAddTasksIntentResponse', 'INAddTasksTargetTaskListResolutionResult', 'INAddTasksTemporalEventTriggerResolutionResult', 'INAirline', 'INAirport', 'INAirportGate', 'INAppendToNoteIntent', 'INAppendToNoteIntentResponse', 'INBalanceAmount', 'INBalanceTypeResolutionResult', 'INBillDetails', 'INBillPayee', 'INBillPayeeResolutionResult', 'INBillTypeResolutionResult', 'INBoatReservation', 'INBoatTrip', 'INBookRestaurantReservationIntent', 'INBookRestaurantReservationIntentResponse', 'INBooleanResolutionResult', 'INBusReservation', 'INBusTrip', 'INCallCapabilityResolutionResult', 'INCallDestinationTypeResolutionResult', 'INCallRecord', 'INCallRecordFilter', 'INCallRecordResolutionResult', 'INCallRecordTypeOptionsResolutionResult', 'INCallRecordTypeResolutionResult', 'INCancelRideIntent', 'INCancelRideIntentResponse', 'INCancelWorkoutIntent', 'INCancelWorkoutIntentResponse', 'INCar', 'INCarAirCirculationModeResolutionResult', 'INCarAudioSourceResolutionResult', 'INCarDefrosterResolutionResult', 'INCarHeadUnit', 'INCarSeatResolutionResult', 'INCarSignalOptionsResolutionResult', 'INCreateNoteIntent', 'INCreateNoteIntentResponse', 'INCreateTaskListIntent', 'INCreateTaskListIntentResponse', 'INCurrencyAmount', 'INCurrencyAmountResolutionResult', 'INDailyRoutineRelevanceProvider', 'INDateComponentsRange', 'INDateComponentsRangeResolutionResult', 'INDateComponentsResolutionResult', 'INDateRelevanceProvider', 'INDateSearchTypeResolutionResult', 'INDefaultCardTemplate', 'INDeleteTasksIntent', 'INDeleteTasksIntentResponse', 'INDeleteTasksTaskListResolutionResult', 'INDeleteTasksTaskResolutionResult', 'INDoubleResolutionResult', 'INEndWorkoutIntent', 'INEndWorkoutIntentResponse', 'INEnergyResolutionResult', 'INEnumResolutionResult', 'INExtension', 'INFile', 'INFileResolutionResult', 'INFlight', 'INFlightReservation', 'INGetAvailableRestaurantReservationBookingDefaultsIntent', 'INGetAvailableRestaurantReservationBookingDefaultsIntentResponse', 'INGetAvailableRestaurantReservationBookingsIntent', 'INGetAvailableRestaurantReservationBookingsIntentResponse', 'INGetCarLockStatusIntent', 'INGetCarLockStatusIntentResponse', 'INGetCarPowerLevelStatusIntent', 'INGetCarPowerLevelStatusIntentResponse', 'INGetReservationDetailsIntent', 'INGetReservationDetailsIntentResponse', 'INGetRestaurantGuestIntent', 'INGetRestaurantGuestIntentResponse', 'INGetRideStatusIntent', 'INGetRideStatusIntentResponse', 'INGetUserCurrentRestaurantReservationBookingsIntent', 'INGetUserCurrentRestaurantReservationBookingsIntentResponse', 'INGetVisualCodeIntent', 'INGetVisualCodeIntentResponse', 'INImage', 'INImageNoteContent', 'INIntegerResolutionResult', 'INIntent', 'INIntentResolutionResult', 'INIntentResponse', 'INInteraction', 'INLengthResolutionResult', 'INListCarsIntent', 
'INListCarsIntentResponse', 'INListRideOptionsIntent', 'INListRideOptionsIntentResponse', 'INLocationRelevanceProvider', 'INLocationSearchTypeResolutionResult', 'INLodgingReservation', 'INMassResolutionResult', 'INMediaAffinityTypeResolutionResult', 'INMediaDestination', 'INMediaDestinationResolutionResult', 'INMediaItem', 'INMediaItemResolutionResult', 'INMediaSearch', 'INMediaUserContext', 'INMessage', 'INMessageAttributeOptionsResolutionResult', 'INMessageAttributeResolutionResult', 'INNote', 'INNoteContent', 'INNoteContentResolutionResult', 'INNoteContentTypeResolutionResult', 'INNoteResolutionResult', 'INNotebookItemTypeResolutionResult', 'INObject', 'INObjectCollection', 'INObjectResolutionResult', 'INObjectSection', 'INOutgoingMessageTypeResolutionResult', 'INParameter', 'INPauseWorkoutIntent', 'INPauseWorkoutIntentResponse', 'INPayBillIntent', 'INPayBillIntentResponse', 'INPaymentAccount', 'INPaymentAccountResolutionResult', 'INPaymentAmount', 'INPaymentAmountResolutionResult', 'INPaymentMethod', 'INPaymentMethodResolutionResult', 'INPaymentRecord', 'INPaymentStatusResolutionResult', 'INPerson', 'INPersonHandle', 'INPersonResolutionResult', 'INPlacemarkResolutionResult', 'INPlayMediaIntent', 'INPlayMediaIntentResponse', 'INPlayMediaMediaItemResolutionResult', 'INPlayMediaPlaybackSpeedResolutionResult', 'INPlaybackQueueLocationResolutionResult', 'INPlaybackRepeatModeResolutionResult', 'INPreferences', 'INPriceRange', 'INRadioTypeResolutionResult', 'INRecurrenceRule', 'INRelativeReferenceResolutionResult', 'INRelativeSettingResolutionResult', 'INRelevanceProvider', 'INRelevantShortcut', 'INRelevantShortcutStore', 'INRentalCar', 'INRentalCarReservation', 'INRequestPaymentCurrencyAmountResolutionResult', 'INRequestPaymentIntent', 'INRequestPaymentIntentResponse', 'INRequestPaymentPayerResolutionResult', 'INRequestRideIntent', 'INRequestRideIntentResponse', 'INReservation', 'INReservationAction', 'INRestaurant', 'INRestaurantGuest', 'INRestaurantGuestDisplayPreferences', 'INRestaurantGuestResolutionResult', 'INRestaurantOffer', 'INRestaurantReservation', 'INRestaurantReservationBooking', 'INRestaurantReservationUserBooking', 'INRestaurantResolutionResult', 'INResumeWorkoutIntent', 'INResumeWorkoutIntentResponse', 'INRideCompletionStatus', 'INRideDriver', 'INRideFareLineItem', 'INRideOption', 'INRidePartySizeOption', 'INRideStatus', 'INRideVehicle', 'INSaveProfileInCarIntent', 'INSaveProfileInCarIntentResponse', 'INSearchCallHistoryIntent', 'INSearchCallHistoryIntentResponse', 'INSearchForAccountsIntent', 'INSearchForAccountsIntentResponse', 'INSearchForBillsIntent', 'INSearchForBillsIntentResponse', 'INSearchForMediaIntent', 'INSearchForMediaIntentResponse', 'INSearchForMediaMediaItemResolutionResult', 'INSearchForMessagesIntent', 'INSearchForMessagesIntentResponse', 'INSearchForNotebookItemsIntent', 'INSearchForNotebookItemsIntentResponse', 'INSearchForPhotosIntent', 'INSearchForPhotosIntentResponse', 'INSeat', 'INSendMessageAttachment', 'INSendMessageIntent', 'INSendMessageIntentResponse', 'INSendMessageRecipientResolutionResult', 'INSendPaymentCurrencyAmountResolutionResult', 'INSendPaymentIntent', 'INSendPaymentIntentResponse', 'INSendPaymentPayeeResolutionResult', 'INSendRideFeedbackIntent', 'INSendRideFeedbackIntentResponse', 'INSetAudioSourceInCarIntent', 'INSetAudioSourceInCarIntentResponse', 'INSetCarLockStatusIntent', 'INSetCarLockStatusIntentResponse', 'INSetClimateSettingsInCarIntent', 'INSetClimateSettingsInCarIntentResponse', 'INSetDefrosterSettingsInCarIntent', 
'INSetDefrosterSettingsInCarIntentResponse', 'INSetMessageAttributeIntent', 'INSetMessageAttributeIntentResponse', 'INSetProfileInCarIntent', 'INSetProfileInCarIntentResponse', 'INSetRadioStationIntent', 'INSetRadioStationIntentResponse', 'INSetSeatSettingsInCarIntent', 'INSetSeatSettingsInCarIntentResponse', 'INSetTaskAttributeIntent', 'INSetTaskAttributeIntentResponse', 'INSetTaskAttributeTemporalEventTriggerResolutionResult', 'INShortcut', 'INSnoozeTasksIntent', 'INSnoozeTasksIntentResponse', 'INSnoozeTasksTaskResolutionResult', 'INSpatialEventTrigger', 'INSpatialEventTriggerResolutionResult', 'INSpeakableString', 'INSpeakableStringResolutionResult', 'INSpeedResolutionResult', 'INStartAudioCallIntent', 'INStartAudioCallIntentResponse', 'INStartCallCallCapabilityResolutionResult', 'INStartCallCallRecordToCallBackResolutionResult', 'INStartCallContactResolutionResult', 'INStartCallIntent', 'INStartCallIntentResponse', 'INStartPhotoPlaybackIntent', 'INStartPhotoPlaybackIntentResponse', 'INStartVideoCallIntent', 'INStartVideoCallIntentResponse', 'INStartWorkoutIntent', 'INStartWorkoutIntentResponse', 'INStringResolutionResult', 'INTask', 'INTaskList', 'INTaskListResolutionResult', 'INTaskPriorityResolutionResult', 'INTaskResolutionResult', 'INTaskStatusResolutionResult', 'INTemperatureResolutionResult', 'INTemporalEventTrigger', 'INTemporalEventTriggerResolutionResult', 'INTemporalEventTriggerTypeOptionsResolutionResult', 'INTermsAndConditions', 'INTextNoteContent', 'INTicketedEvent', 'INTicketedEventReservation', 'INTimeIntervalResolutionResult', 'INTrainReservation', 'INTrainTrip', 'INTransferMoneyIntent', 'INTransferMoneyIntentResponse', 'INUIAddVoiceShortcutButton', 'INUIAddVoiceShortcutViewController', 'INUIEditVoiceShortcutViewController', 'INURLResolutionResult', 'INUpcomingMediaManager', 'INUpdateMediaAffinityIntent', 'INUpdateMediaAffinityIntentResponse', 'INUpdateMediaAffinityMediaItemResolutionResult', 'INUserContext', 'INVisualCodeTypeResolutionResult', 'INVocabulary', 'INVoiceShortcut', 'INVoiceShortcutCenter', 'INVolumeResolutionResult', 'INWorkoutGoalUnitTypeResolutionResult', 'INWorkoutLocationTypeResolutionResult', 'IOSurface', 'JSContext', 'JSManagedValue', 'JSValue', 'JSVirtualMachine', 'LAContext', 'LPLinkMetadata', 'LPLinkView', 'LPMetadataProvider', 'MCAdvertiserAssistant', 'MCBrowserViewController', 'MCNearbyServiceAdvertiser', 'MCNearbyServiceBrowser', 'MCPeerID', 'MCSession', 'MDLAnimatedMatrix4x4', 'MDLAnimatedQuaternion', 'MDLAnimatedQuaternionArray', 'MDLAnimatedScalar', 'MDLAnimatedScalarArray', 'MDLAnimatedValue', 'MDLAnimatedVector2', 'MDLAnimatedVector3', 'MDLAnimatedVector3Array', 'MDLAnimatedVector4', 'MDLAnimationBindComponent', 'MDLAreaLight', 'MDLAsset', 'MDLBundleAssetResolver', 'MDLCamera', 'MDLCheckerboardTexture', 'MDLColorSwatchTexture', 'MDLLight', 'MDLLightProbe', 'MDLMaterial', 'MDLMaterialProperty', 'MDLMaterialPropertyConnection', 'MDLMaterialPropertyGraph', 'MDLMaterialPropertyNode', 'MDLMatrix4x4Array', 'MDLMesh', 'MDLMeshBufferData', 'MDLMeshBufferDataAllocator', 'MDLMeshBufferMap', 'MDLMeshBufferZoneDefault', 'MDLNoiseTexture', 'MDLNormalMapTexture', 'MDLObject', 'MDLObjectContainer', 'MDLPackedJointAnimation', 'MDLPathAssetResolver', 'MDLPhotometricLight', 'MDLPhysicallyPlausibleLight', 'MDLPhysicallyPlausibleScatteringFunction', 'MDLRelativeAssetResolver', 'MDLScatteringFunction', 'MDLSkeleton', 'MDLSkyCubeTexture', 'MDLStereoscopicCamera', 'MDLSubmesh', 'MDLSubmeshTopology', 'MDLTexture', 'MDLTextureFilter', 'MDLTextureSampler', 
'MDLTransform', 'MDLTransformMatrixOp', 'MDLTransformOrientOp', 'MDLTransformRotateOp', 'MDLTransformRotateXOp', 'MDLTransformRotateYOp', 'MDLTransformRotateZOp', 'MDLTransformScaleOp', 'MDLTransformStack', 'MDLTransformTranslateOp', 'MDLURLTexture', 'MDLVertexAttribute', 'MDLVertexAttributeData', 'MDLVertexBufferLayout', 'MDLVertexDescriptor', 'MDLVoxelArray', 'MFMailComposeViewController', 'MFMessageComposeViewController', 'MIDICIDeviceInfo', 'MIDICIDiscoveredNode', 'MIDICIDiscoveryManager', 'MIDICIProfile', 'MIDICIProfileState', 'MIDICIResponder', 'MIDICISession', 'MIDINetworkConnection', 'MIDINetworkHost', 'MIDINetworkSession', 'MKAnnotationView', 'MKCircle', 'MKCircleRenderer', 'MKCircleView', 'MKClusterAnnotation', 'MKCompassButton', 'MKDirections', 'MKDirectionsRequest', 'MKDirectionsResponse', 'MKDistanceFormatter', 'MKETAResponse', 'MKGeoJSONDecoder', 'MKGeoJSONFeature', 'MKGeodesicPolyline', 'MKGradientPolylineRenderer', 'MKLocalPointsOfInterestRequest', 'MKLocalSearch', 'MKLocalSearchCompleter', 'MKLocalSearchCompletion', 'MKLocalSearchRequest', 'MKLocalSearchResponse', 'MKMapCamera', 'MKMapCameraBoundary', 'MKMapCameraZoomRange', 'MKMapItem', 'MKMapSnapshot', 'MKMapSnapshotOptions', 'MKMapSnapshotter', 'MKMapView', 'MKMarkerAnnotationView', 'MKMultiPoint', 'MKMultiPolygon', 'MKMultiPolygonRenderer', 'MKMultiPolyline', 'MKMultiPolylineRenderer', 'MKOverlayPathRenderer', 'MKOverlayPathView', 'MKOverlayRenderer', 'MKOverlayView', 'MKPinAnnotationView', 'MKPitchControl', 'MKPlacemark', 'MKPointAnnotation', 'MKPointOfInterestFilter', 'MKPolygon', 'MKPolygonRenderer', 'MKPolygonView', 'MKPolyline', 'MKPolylineRenderer', 'MKPolylineView', 'MKReverseGeocoder', 'MKRoute', 'MKRouteStep', 'MKScaleView', 'MKShape', 'MKTileOverlay', 'MKTileOverlayRenderer', 'MKUserLocation', 'MKUserLocationView', 'MKUserTrackingBarButtonItem', 'MKUserTrackingButton', 'MKZoomControl', 'MLArrayBatchProvider', 'MLCActivationDescriptor', 'MLCActivationLayer', 'MLCArithmeticLayer', 'MLCBatchNormalizationLayer', 'MLCConcatenationLayer', 'MLCConvolutionDescriptor', 'MLCConvolutionLayer', 'MLCDevice', 'MLCDropoutLayer', 'MLCEmbeddingDescriptor', 'MLCEmbeddingLayer', 'MLCFullyConnectedLayer', 'MLCGramMatrixLayer', 'MLCGraph', 'MLCGroupNormalizationLayer', 'MLCInferenceGraph', 'MLCInstanceNormalizationLayer', 'MLCLSTMDescriptor', 'MLCLSTMLayer', 'MLCLayer', 'MLCLayerNormalizationLayer', 'MLCLossDescriptor', 'MLCLossLayer', 'MLCMatMulDescriptor', 'MLCMatMulLayer', 'MLCMultiheadAttentionDescriptor', 'MLCMultiheadAttentionLayer', 'MLCPaddingLayer', 'MLCPoolingDescriptor', 'MLCPoolingLayer', 'MLCReductionLayer', 'MLCReshapeLayer', 'MLCSliceLayer', 'MLCSoftmaxLayer', 'MLCSplitLayer', 'MLCTensor', 'MLCTensorData', 'MLCTensorDescriptor', 'MLCTensorOptimizerDeviceData', 'MLCTensorParameter', 'MLCTrainingGraph', 'MLCTransposeLayer', 'MLCUpsampleLayer', 'MLCYOLOLossDescriptor', 'MLCYOLOLossLayer', 'MLDictionaryConstraint', 'MLDictionaryFeatureProvider', 'MLFeatureDescription', 'MLFeatureValue', 'MLImageConstraint', 'MLImageSize', 'MLImageSizeConstraint', 'MLKey', 'MLMetricKey', 'MLModel', 'MLModelCollection', 'MLModelCollectionEntry', 'MLModelConfiguration', 'MLModelDescription', 'MLMultiArray', 'MLMultiArrayConstraint', 'MLMultiArrayShapeConstraint', 'MLNumericConstraint', 'MLParameterDescription', 'MLParameterKey', 'MLPredictionOptions', 'MLSequence', 'MLSequenceConstraint', 'MLTask', 'MLUpdateContext', 'MLUpdateProgressHandlers', 'MLUpdateTask', 'MPChangeLanguageOptionCommandEvent', 'MPChangePlaybackPositionCommand', 
'MPChangePlaybackPositionCommandEvent', 'MPChangePlaybackRateCommand', 'MPChangePlaybackRateCommandEvent', 'MPChangeRepeatModeCommand', 'MPChangeRepeatModeCommandEvent', 'MPChangeShuffleModeCommand', 'MPChangeShuffleModeCommandEvent', 'MPContentItem', 'MPFeedbackCommand', 'MPFeedbackCommandEvent', 'MPMediaEntity', 'MPMediaItem', 'MPMediaItemArtwork', 'MPMediaItemCollection', 'MPMediaLibrary', 'MPMediaPickerController', 'MPMediaPlaylist', 'MPMediaPlaylistCreationMetadata', 'MPMediaPredicate', 'MPMediaPropertyPredicate', 'MPMediaQuery', 'MPMediaQuerySection', 'MPMovieAccessLog', 'MPMovieAccessLogEvent', 'MPMovieErrorLog', 'MPMovieErrorLogEvent', 'MPMoviePlayerController', 'MPMoviePlayerViewController', 'MPMusicPlayerApplicationController', 'MPMusicPlayerController', 'MPMusicPlayerControllerMutableQueue', 'MPMusicPlayerControllerQueue', 'MPMusicPlayerMediaItemQueueDescriptor', 'MPMusicPlayerPlayParameters', 'MPMusicPlayerPlayParametersQueueDescriptor', 'MPMusicPlayerQueueDescriptor', 'MPMusicPlayerStoreQueueDescriptor', 'MPNowPlayingInfoCenter', 'MPNowPlayingInfoLanguageOption', 'MPNowPlayingInfoLanguageOptionGroup', 'MPNowPlayingSession', 'MPPlayableContentManager', 'MPPlayableContentManagerContext', 'MPRatingCommand', 'MPRatingCommandEvent', 'MPRemoteCommand', 'MPRemoteCommandCenter', 'MPRemoteCommandEvent', 'MPSGraph', 'MPSGraphConvolution2DOpDescriptor', 'MPSGraphDepthwiseConvolution2DOpDescriptor', 'MPSGraphDevice', 'MPSGraphExecutionDescriptor', 'MPSGraphOperation', 'MPSGraphPooling2DOpDescriptor', 'MPSGraphShapedType', 'MPSGraphTensor', 'MPSGraphTensorData', 'MPSGraphVariableOp', 'MPSeekCommandEvent', 'MPSkipIntervalCommand', 'MPSkipIntervalCommandEvent', 'MPTimedMetadata', 'MPVolumeView', 'MSConversation', 'MSMessage', 'MSMessageLayout', 'MSMessageLiveLayout', 'MSMessageTemplateLayout', 'MSMessagesAppViewController', 'MSServiceAccount', 'MSSession', 'MSSetupSession', 'MSSticker', 'MSStickerBrowserView', 'MSStickerBrowserViewController', 'MSStickerView', 'MTKMesh', 'MTKMeshBuffer', 'MTKMeshBufferAllocator', 'MTKSubmesh', 'MTKTextureLoader', 'MTKView', 'MTLAccelerationStructureBoundingBoxGeometryDescriptor', 'MTLAccelerationStructureDescriptor', 'MTLAccelerationStructureGeometryDescriptor', 'MTLAccelerationStructureTriangleGeometryDescriptor', 'MTLArgument', 'MTLArgumentDescriptor', 'MTLArrayType', 'MTLAttribute', 'MTLAttributeDescriptor', 'MTLAttributeDescriptorArray', 'MTLBinaryArchiveDescriptor', 'MTLBlitPassDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptorArray', 'MTLBufferLayoutDescriptor', 'MTLBufferLayoutDescriptorArray', 'MTLCaptureDescriptor', 'MTLCaptureManager', 'MTLCommandBufferDescriptor', 'MTLCompileOptions', 'MTLComputePassDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptorArray', 'MTLComputePipelineDescriptor', 'MTLComputePipelineReflection', 'MTLCounterSampleBufferDescriptor', 'MTLDepthStencilDescriptor', 'MTLFunctionConstant', 'MTLFunctionConstantValues', 'MTLFunctionDescriptor', 'MTLHeapDescriptor', 'MTLIndirectCommandBufferDescriptor', 'MTLInstanceAccelerationStructureDescriptor', 'MTLIntersectionFunctionDescriptor', 'MTLIntersectionFunctionTableDescriptor', 'MTLLinkedFunctions', 'MTLPipelineBufferDescriptor', 'MTLPipelineBufferDescriptorArray', 'MTLPointerType', 'MTLPrimitiveAccelerationStructureDescriptor', 'MTLRasterizationRateLayerArray', 'MTLRasterizationRateLayerDescriptor', 'MTLRasterizationRateMapDescriptor', 'MTLRasterizationRateSampleArray', 
'MTLRenderPassAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptorArray', 'MTLRenderPassDepthAttachmentDescriptor', 'MTLRenderPassDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptorArray', 'MTLRenderPassStencilAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MTLRenderPipelineDescriptor', 'MTLRenderPipelineReflection', 'MTLResourceStatePassDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptorArray', 'MTLSamplerDescriptor', 'MTLSharedEventHandle', 'MTLSharedEventListener', 'MTLSharedTextureHandle', 'MTLStageInputOutputDescriptor', 'MTLStencilDescriptor', 'MTLStructMember', 'MTLStructType', 'MTLTextureDescriptor', 'MTLTextureReferenceType', 'MTLTileRenderPipelineColorAttachmentDescriptor', 'MTLTileRenderPipelineColorAttachmentDescriptorArray', 'MTLTileRenderPipelineDescriptor', 'MTLType', 'MTLVertexAttribute', 'MTLVertexAttributeDescriptor', 'MTLVertexAttributeDescriptorArray', 'MTLVertexBufferLayoutDescriptor', 'MTLVertexBufferLayoutDescriptorArray', 'MTLVertexDescriptor', 'MTLVisibleFunctionTableDescriptor', 'MXAnimationMetric', 'MXAppExitMetric', 'MXAppLaunchMetric', 'MXAppResponsivenessMetric', 'MXAppRunTimeMetric', 'MXAverage', 'MXBackgroundExitData', 'MXCPUExceptionDiagnostic', 'MXCPUMetric', 'MXCallStackTree', 'MXCellularConditionMetric', 'MXCrashDiagnostic', 'MXDiagnostic', 'MXDiagnosticPayload', 'MXDiskIOMetric', 'MXDiskWriteExceptionDiagnostic', 'MXDisplayMetric', 'MXForegroundExitData', 'MXGPUMetric', 'MXHangDiagnostic', 'MXHistogram', 'MXHistogramBucket', 'MXLocationActivityMetric', 'MXMemoryMetric', 'MXMetaData', 'MXMetric', 'MXMetricManager', 'MXMetricPayload', 'MXNetworkTransferMetric', 'MXSignpostIntervalData', 'MXSignpostMetric', 'MXUnitAveragePixelLuminance', 'MXUnitSignalBars', 'MyClass', 'NCWidgetController', 'NEAppProxyFlow', 'NEAppProxyProvider', 'NEAppProxyProviderManager', 'NEAppProxyTCPFlow', 'NEAppProxyUDPFlow', 'NEAppPushManager', 'NEAppPushProvider', 'NEAppRule', 'NEDNSOverHTTPSSettings', 'NEDNSOverTLSSettings', 'NEDNSProxyManager', 'NEDNSProxyProvider', 'NEDNSProxyProviderProtocol', 'NEDNSSettings', 'NEDNSSettingsManager', 'NEEvaluateConnectionRule', 'NEFilterBrowserFlow', 'NEFilterControlProvider', 'NEFilterControlVerdict', 'NEFilterDataProvider', 'NEFilterDataVerdict', 'NEFilterFlow', 'NEFilterManager', 'NEFilterNewFlowVerdict', 'NEFilterPacketContext', 'NEFilterPacketProvider', 'NEFilterProvider', 'NEFilterProviderConfiguration', 'NEFilterRemediationVerdict', 'NEFilterReport', 'NEFilterRule', 'NEFilterSettings', 'NEFilterSocketFlow', 'NEFilterVerdict', 'NEFlowMetaData', 'NEHotspotConfiguration', 'NEHotspotConfigurationManager', 'NEHotspotEAPSettings', 'NEHotspotHS20Settings', 'NEHotspotHelper', 'NEHotspotHelperCommand', 'NEHotspotHelperResponse', 'NEHotspotNetwork', 'NEIPv4Route', 'NEIPv4Settings', 'NEIPv6Route', 'NEIPv6Settings', 'NENetworkRule', 'NEOnDemandRule', 'NEOnDemandRuleConnect', 'NEOnDemandRuleDisconnect', 'NEOnDemandRuleEvaluateConnection', 'NEOnDemandRuleIgnore', 'NEPacket', 'NEPacketTunnelFlow', 'NEPacketTunnelNetworkSettings', 'NEPacketTunnelProvider', 'NEProvider', 'NEProxyServer', 'NEProxySettings', 'NETransparentProxyManager', 'NETransparentProxyNetworkSettings', 'NETransparentProxyProvider', 'NETunnelNetworkSettings', 'NETunnelProvider', 'NETunnelProviderManager', 'NETunnelProviderProtocol', 
'NETunnelProviderSession', 'NEVPNConnection', 'NEVPNIKEv2SecurityAssociationParameters', 'NEVPNManager', 'NEVPNProtocol', 'NEVPNProtocolIKEv2', 'NEVPNProtocolIPSec', 'NFCISO15693CustomCommandConfiguration', 'NFCISO15693ReadMultipleBlocksConfiguration', 'NFCISO15693ReaderSession', 'NFCISO7816APDU', 'NFCNDEFMessage', 'NFCNDEFPayload', 'NFCNDEFReaderSession', 'NFCReaderSession', 'NFCTagCommandConfiguration', 'NFCTagReaderSession', 'NFCVASCommandConfiguration', 'NFCVASReaderSession', 'NFCVASResponse', 'NIConfiguration', 'NIDiscoveryToken', 'NINearbyObject', 'NINearbyPeerConfiguration', 'NISession', 'NKAssetDownload', 'NKIssue', 'NKLibrary', 'NLEmbedding', 'NLGazetteer', 'NLLanguageRecognizer', 'NLModel', 'NLModelConfiguration', 'NLTagger', 'NLTokenizer', 'NSArray', 'NSAssertionHandler', 'NSAsynchronousFetchRequest', 'NSAsynchronousFetchResult', 'NSAtomicStore', 'NSAtomicStoreCacheNode', 'NSAttributeDescription', 'NSAttributedString', 'NSAutoreleasePool', 'NSBatchDeleteRequest', 'NSBatchDeleteResult', 'NSBatchInsertRequest', 'NSBatchInsertResult', 'NSBatchUpdateRequest', 'NSBatchUpdateResult', 'NSBlockOperation', 'NSBundle', 'NSBundleResourceRequest', 'NSByteCountFormatter', 'NSCache', 'NSCachedURLResponse', 'NSCalendar', 'NSCharacterSet', 'NSCoder', 'NSCollectionLayoutAnchor', 'NSCollectionLayoutBoundarySupplementaryItem', 'NSCollectionLayoutDecorationItem', 'NSCollectionLayoutDimension', 'NSCollectionLayoutEdgeSpacing', 'NSCollectionLayoutGroup', 'NSCollectionLayoutGroupCustomItem', 'NSCollectionLayoutItem', 'NSCollectionLayoutSection', 'NSCollectionLayoutSize', 'NSCollectionLayoutSpacing', 'NSCollectionLayoutSupplementaryItem', 'NSComparisonPredicate', 'NSCompoundPredicate', 'NSCondition', 'NSConditionLock', 'NSConstantString', 'NSConstraintConflict', 'NSCoreDataCoreSpotlightDelegate', 'NSCountedSet', 'NSData', 'NSDataAsset', 'NSDataDetector', 'NSDate', 'NSDateComponents', 'NSDateComponentsFormatter', 'NSDateFormatter', 'NSDateInterval', 'NSDateIntervalFormatter', 'NSDecimalNumber', 'NSDecimalNumberHandler', 'NSDerivedAttributeDescription', 'NSDictionary', 'NSDiffableDataSourceSectionSnapshot', 'NSDiffableDataSourceSectionTransaction', 'NSDiffableDataSourceSnapshot', 'NSDiffableDataSourceTransaction', 'NSDimension', 'NSDirectoryEnumerator', 'NSEnergyFormatter', 'NSEntityDescription', 'NSEntityMapping', 'NSEntityMigrationPolicy', 'NSEnumerator', 'NSError', 'NSEvent', 'NSException', 'NSExpression', 'NSExpressionDescription', 'NSExtensionContext', 'NSExtensionItem', 'NSFetchIndexDescription', 'NSFetchIndexElementDescription', 'NSFetchRequest', 'NSFetchRequestExpression', 'NSFetchedPropertyDescription', 'NSFetchedResultsController', 'NSFileAccessIntent', 'NSFileCoordinator', 'NSFileHandle', 'NSFileManager', 'NSFileProviderDomain', 'NSFileProviderExtension', 'NSFileProviderManager', 'NSFileProviderService', 'NSFileSecurity', 'NSFileVersion', 'NSFileWrapper', 'NSFormatter', 'NSHTTPCookie', 'NSHTTPCookieStorage', 'NSHTTPURLResponse', 'NSHashTable', 'NSISO8601DateFormatter', 'NSIncrementalStore', 'NSIncrementalStoreNode', 'NSIndexPath', 'NSIndexSet', 'NSInputStream', 'NSInvocation', 'NSInvocationOperation', 'NSItemProvider', 'NSJSONSerialization', 'NSKeyedArchiver', 'NSKeyedUnarchiver', 'NSLayoutAnchor', 'NSLayoutConstraint', 'NSLayoutDimension', 'NSLayoutManager', 'NSLayoutXAxisAnchor', 'NSLayoutYAxisAnchor', 'NSLengthFormatter', 'NSLinguisticTagger', 'NSListFormatter', 'NSLocale', 'NSLock', 'NSMachPort', 'NSManagedObject', 'NSManagedObjectContext', 'NSManagedObjectID', 'NSManagedObjectModel', 
'NSMapTable', 'NSMappingModel', 'NSMassFormatter', 'NSMeasurement', 'NSMeasurementFormatter', 'NSMenuToolbarItem', 'NSMergeConflict', 'NSMergePolicy', 'NSMessagePort', 'NSMetadataItem', 'NSMetadataQuery', 'NSMetadataQueryAttributeValueTuple', 'NSMetadataQueryResultGroup', 'NSMethodSignature', 'NSMigrationManager', 'NSMutableArray', 'NSMutableAttributedString', 'NSMutableCharacterSet', 'NSMutableData', 'NSMutableDictionary', 'NSMutableIndexSet', 'NSMutableOrderedSet', 'NSMutableParagraphStyle', 'NSMutableSet', 'NSMutableString', 'NSMutableURLRequest', 'NSNetService', 'NSNetServiceBrowser', 'NSNotification', 'NSNotificationCenter', 'NSNotificationQueue', 'NSNull', 'NSNumber', 'NSNumberFormatter', 'NSObject', 'NSOperation', 'NSOperationQueue', 'NSOrderedCollectionChange', 'NSOrderedCollectionDifference', 'NSOrderedSet', 'NSOrthography', 'NSOutputStream', 'NSParagraphStyle', 'NSPersistentCloudKitContainer', 'NSPersistentCloudKitContainerEvent', 'NSPersistentCloudKitContainerEventRequest', 'NSPersistentCloudKitContainerEventResult', 'NSPersistentCloudKitContainerOptions', 'NSPersistentContainer', 'NSPersistentHistoryChange', 'NSPersistentHistoryChangeRequest', 'NSPersistentHistoryResult', 'NSPersistentHistoryToken', 'NSPersistentHistoryTransaction', 'NSPersistentStore', 'NSPersistentStoreAsynchronousResult', 'NSPersistentStoreCoordinator', 'NSPersistentStoreDescription', 'NSPersistentStoreRequest', 'NSPersistentStoreResult', 'NSPersonNameComponents', 'NSPersonNameComponentsFormatter', 'NSPipe', 'NSPointerArray', 'NSPointerFunctions', 'NSPort', 'NSPredicate', 'NSProcessInfo', 'NSProgress', 'NSPropertyDescription', 'NSPropertyListSerialization', 'NSPropertyMapping', 'NSProxy', 'NSPurgeableData', 'NSQueryGenerationToken', 'NSRecursiveLock', 'NSRegularExpression', 'NSRelationshipDescription', 'NSRelativeDateTimeFormatter', 'NSRunLoop', 'NSSaveChangesRequest', 'NSScanner', 'NSSecureUnarchiveFromDataTransformer', 'NSSet', 'NSShadow', 'NSSharingServicePickerToolbarItem', 'NSSharingServicePickerTouchBarItem', 'NSSimpleCString', 'NSSocketPort', 'NSSortDescriptor', 'NSStream', 'NSString', 'NSStringDrawingContext', 'NSTextAttachment', 'NSTextCheckingResult', 'NSTextContainer', 'NSTextStorage', 'NSTextTab', 'NSThread', 'NSTimeZone', 'NSTimer', 'NSToolbarItem', 'NSURL', 'NSURLAuthenticationChallenge', 'NSURLCache', 'NSURLComponents', 'NSURLConnection', 'NSURLCredential', 'NSURLCredentialStorage', 'NSURLProtectionSpace', 'NSURLProtocol', 'NSURLQueryItem', 'NSURLRequest', 'NSURLResponse', 'NSURLSession', 'NSURLSessionConfiguration', 'NSURLSessionDataTask', 'NSURLSessionDownloadTask', 'NSURLSessionStreamTask', 'NSURLSessionTask', 'NSURLSessionTaskMetrics', 'NSURLSessionTaskTransactionMetrics', 'NSURLSessionUploadTask', 'NSURLSessionWebSocketMessage', 'NSURLSessionWebSocketTask', 'NSUUID', 'NSUbiquitousKeyValueStore', 'NSUndoManager', 'NSUnit', 'NSUnitAcceleration', 'NSUnitAngle', 'NSUnitArea', 'NSUnitConcentrationMass', 'NSUnitConverter', 'NSUnitConverterLinear', 'NSUnitDispersion', 'NSUnitDuration', 'NSUnitElectricCharge', 'NSUnitElectricCurrent', 'NSUnitElectricPotentialDifference', 'NSUnitElectricResistance', 'NSUnitEnergy', 'NSUnitFrequency', 'NSUnitFuelEfficiency', 'NSUnitIlluminance', 'NSUnitInformationStorage', 'NSUnitLength', 'NSUnitMass', 'NSUnitPower', 'NSUnitPressure', 'NSUnitSpeed', 'NSUnitTemperature', 'NSUnitVolume', 'NSUserActivity', 'NSUserDefaults', 'NSValue', 'NSValueTransformer', 'NSXMLParser', 'NSXPCCoder', 'NSXPCConnection', 'NSXPCInterface', 'NSXPCListener', 'NSXPCListenerEndpoint', 
'NWBonjourServiceEndpoint', 'NWEndpoint', 'NWHostEndpoint', 'NWPath', 'NWTCPConnection', 'NWTLSParameters', 'NWUDPSession', 'OSLogEntry', 'OSLogEntryActivity', 'OSLogEntryBoundary', 'OSLogEntryLog', 'OSLogEntrySignpost', 'OSLogEnumerator', 'OSLogMessageComponent', 'OSLogPosition', 'OSLogStore', 'PDFAction', 'PDFActionGoTo', 'PDFActionNamed', 'PDFActionRemoteGoTo', 'PDFActionResetForm', 'PDFActionURL', 'PDFAnnotation', 'PDFAppearanceCharacteristics', 'PDFBorder', 'PDFDestination', 'PDFDocument', 'PDFOutline', 'PDFPage', 'PDFSelection', 'PDFThumbnailView', 'PDFView', 'PHAdjustmentData', 'PHAsset', 'PHAssetChangeRequest', 'PHAssetCollection', 'PHAssetCollectionChangeRequest', 'PHAssetCreationRequest', 'PHAssetResource', 'PHAssetResourceCreationOptions', 'PHAssetResourceManager', 'PHAssetResourceRequestOptions', 'PHCachingImageManager', 'PHChange', 'PHChangeRequest', 'PHCloudIdentifier', 'PHCollection', 'PHCollectionList', 'PHCollectionListChangeRequest', 'PHContentEditingInput', 'PHContentEditingInputRequestOptions', 'PHContentEditingOutput', 'PHEditingExtensionContext', 'PHFetchOptions', 'PHFetchResult', 'PHFetchResultChangeDetails', 'PHImageManager', 'PHImageRequestOptions', 'PHLivePhoto', 'PHLivePhotoEditingContext', 'PHLivePhotoRequestOptions', 'PHLivePhotoView', 'PHObject', 'PHObjectChangeDetails', 'PHObjectPlaceholder', 'PHPhotoLibrary', 'PHPickerConfiguration', 'PHPickerFilter', 'PHPickerResult', 'PHPickerViewController', 'PHProject', 'PHProjectChangeRequest', 'PHVideoRequestOptions', 'PKAddCarKeyPassConfiguration', 'PKAddPassButton', 'PKAddPassesViewController', 'PKAddPaymentPassRequest', 'PKAddPaymentPassRequestConfiguration', 'PKAddPaymentPassViewController', 'PKAddSecureElementPassConfiguration', 'PKAddSecureElementPassViewController', 'PKAddShareablePassConfiguration', 'PKBarcodeEventConfigurationRequest', 'PKBarcodeEventMetadataRequest', 'PKBarcodeEventMetadataResponse', 'PKBarcodeEventSignatureRequest', 'PKBarcodeEventSignatureResponse', 'PKCanvasView', 'PKContact', 'PKDisbursementAuthorizationController', 'PKDisbursementRequest', 'PKDisbursementVoucher', 'PKDrawing', 'PKEraserTool', 'PKFloatRange', 'PKInk', 'PKInkingTool', 'PKIssuerProvisioningExtensionHandler', 'PKIssuerProvisioningExtensionPassEntry', 'PKIssuerProvisioningExtensionPaymentPassEntry', 'PKIssuerProvisioningExtensionStatus', 'PKLabeledValue', 'PKLassoTool', 'PKObject', 'PKPass', 'PKPassLibrary', 'PKPayment', 'PKPaymentAuthorizationController', 'PKPaymentAuthorizationResult', 'PKPaymentAuthorizationViewController', 'PKPaymentButton', 'PKPaymentInformationEventExtension', 'PKPaymentMerchantSession', 'PKPaymentMethod', 'PKPaymentPass', 'PKPaymentRequest', 'PKPaymentRequestMerchantSessionUpdate', 'PKPaymentRequestPaymentMethodUpdate', 'PKPaymentRequestShippingContactUpdate', 'PKPaymentRequestShippingMethodUpdate', 'PKPaymentRequestUpdate', 'PKPaymentSummaryItem', 'PKPaymentToken', 'PKPushCredentials', 'PKPushPayload', 'PKPushRegistry', 'PKSecureElementPass', 'PKShareablePassMetadata', 'PKShippingMethod', 'PKStroke', 'PKStrokePath', 'PKStrokePoint', 'PKSuicaPassProperties', 'PKTool', 'PKToolPicker', 'PKTransitPassProperties', 'QLFileThumbnailRequest', 'QLPreviewController', 'QLThumbnailGenerationRequest', 'QLThumbnailGenerator', 'QLThumbnailProvider', 'QLThumbnailReply', 'QLThumbnailRepresentation', 'RPBroadcastActivityController', 'RPBroadcastActivityViewController', 'RPBroadcastConfiguration', 'RPBroadcastController', 'RPBroadcastHandler', 'RPBroadcastMP4ClipHandler', 'RPBroadcastSampleHandler', 
'RPPreviewViewController', 'RPScreenRecorder', 'RPSystemBroadcastPickerView', 'SCNAccelerationConstraint', 'SCNAction', 'SCNAnimation', 'SCNAnimationEvent', 'SCNAnimationPlayer', 'SCNAudioPlayer', 'SCNAudioSource', 'SCNAvoidOccluderConstraint', 'SCNBillboardConstraint', 'SCNBox', 'SCNCamera', 'SCNCameraController', 'SCNCapsule', 'SCNCone', 'SCNConstraint', 'SCNCylinder', 'SCNDistanceConstraint', 'SCNFloor', 'SCNGeometry', 'SCNGeometryElement', 'SCNGeometrySource', 'SCNGeometryTessellator', 'SCNHitTestResult', 'SCNIKConstraint', 'SCNLevelOfDetail', 'SCNLight', 'SCNLookAtConstraint', 'SCNMaterial', 'SCNMaterialProperty', 'SCNMorpher', 'SCNNode', 'SCNParticlePropertyController', 'SCNParticleSystem', 'SCNPhysicsBallSocketJoint', 'SCNPhysicsBehavior', 'SCNPhysicsBody', 'SCNPhysicsConeTwistJoint', 'SCNPhysicsContact', 'SCNPhysicsField', 'SCNPhysicsHingeJoint', 'SCNPhysicsShape', 'SCNPhysicsSliderJoint', 'SCNPhysicsVehicle', 'SCNPhysicsVehicleWheel', 'SCNPhysicsWorld', 'SCNPlane', 'SCNProgram', 'SCNPyramid', 'SCNReferenceNode', 'SCNRenderer', 'SCNReplicatorConstraint', 'SCNScene', 'SCNSceneSource', 'SCNShape', 'SCNSkinner', 'SCNSliderConstraint', 'SCNSphere', 'SCNTechnique', 'SCNText', 'SCNTimingFunction', 'SCNTorus', 'SCNTransaction', 'SCNTransformConstraint', 'SCNTube', 'SCNView', 'SFAcousticFeature', 'SFAuthenticationSession', 'SFContentBlockerManager', 'SFContentBlockerState', 'SFSafariViewController', 'SFSafariViewControllerConfiguration', 'SFSpeechAudioBufferRecognitionRequest', 'SFSpeechRecognitionRequest', 'SFSpeechRecognitionResult', 'SFSpeechRecognitionTask', 'SFSpeechRecognizer', 'SFSpeechURLRecognitionRequest', 'SFTranscription', 'SFTranscriptionSegment', 'SFVoiceAnalytics', 'SK3DNode', 'SKAction', 'SKAdNetwork', 'SKArcadeService', 'SKAttribute', 'SKAttributeValue', 'SKAudioNode', 'SKCameraNode', 'SKCloudServiceController', 'SKCloudServiceSetupViewController', 'SKConstraint', 'SKCropNode', 'SKDownload', 'SKEffectNode', 'SKEmitterNode', 'SKFieldNode', 'SKKeyframeSequence', 'SKLabelNode', 'SKLightNode', 'SKMutablePayment', 'SKMutableTexture', 'SKNode', 'SKOverlay', 'SKOverlayAppClipConfiguration', 'SKOverlayAppConfiguration', 'SKOverlayConfiguration', 'SKOverlayTransitionContext', 'SKPayment', 'SKPaymentDiscount', 'SKPaymentQueue', 'SKPaymentTransaction', 'SKPhysicsBody', 'SKPhysicsContact', 'SKPhysicsJoint', 'SKPhysicsJointFixed', 'SKPhysicsJointLimit', 'SKPhysicsJointPin', 'SKPhysicsJointSliding', 'SKPhysicsJointSpring', 'SKPhysicsWorld', 'SKProduct', 'SKProductDiscount', 'SKProductStorePromotionController', 'SKProductSubscriptionPeriod', 'SKProductsRequest', 'SKProductsResponse', 'SKRange', 'SKReachConstraints', 'SKReceiptRefreshRequest', 'SKReferenceNode', 'SKRegion', 'SKRenderer', 'SKRequest', 'SKScene', 'SKShader', 'SKShapeNode', 'SKSpriteNode', 'SKStoreProductViewController', 'SKStoreReviewController', 'SKStorefront', 'SKTexture', 'SKTextureAtlas', 'SKTileDefinition', 'SKTileGroup', 'SKTileGroupRule', 'SKTileMapNode', 'SKTileSet', 'SKTransformNode', 'SKTransition', 'SKUniform', 'SKVideoNode', 'SKView', 'SKWarpGeometry', 'SKWarpGeometryGrid', 'SLComposeServiceViewController', 'SLComposeSheetConfigurationItem', 'SLComposeViewController', 'SLRequest', 'SNAudioFileAnalyzer', 'SNAudioStreamAnalyzer', 'SNClassification', 'SNClassificationResult', 'SNClassifySoundRequest', 'SRAmbientLightSample', 'SRApplicationUsage', 'SRDeletionRecord', 'SRDevice', 'SRDeviceUsageReport', 'SRFetchRequest', 'SRFetchResult', 'SRKeyboardMetrics', 'SRKeyboardProbabilityMetric', 'SRMessagesUsageReport', 
'SRNotificationUsage', 'SRPhoneUsageReport', 'SRSensorReader', 'SRVisit', 'SRWebUsage', 'SRWristDetection', 'SSReadingList', 'STScreenTimeConfiguration', 'STScreenTimeConfigurationObserver', 'STWebHistory', 'STWebpageController', 'TKBERTLVRecord', 'TKCompactTLVRecord', 'TKSimpleTLVRecord', 'TKSmartCard', 'TKSmartCardATR', 'TKSmartCardATRInterfaceGroup', 'TKSmartCardPINFormat', 'TKSmartCardSlot', 'TKSmartCardSlotManager', 'TKSmartCardToken', 'TKSmartCardTokenDriver', 'TKSmartCardTokenSession', 'TKSmartCardUserInteraction', 'TKSmartCardUserInteractionForPINOperation', 'TKSmartCardUserInteractionForSecurePINChange', 'TKSmartCardUserInteractionForSecurePINVerification', 'TKTLVRecord', 'TKToken', 'TKTokenAuthOperation', 'TKTokenConfiguration', 'TKTokenDriver', 'TKTokenDriverConfiguration', 'TKTokenKeyAlgorithm', 'TKTokenKeyExchangeParameters', 'TKTokenKeychainCertificate', 'TKTokenKeychainContents', 'TKTokenKeychainItem', 'TKTokenKeychainKey', 'TKTokenPasswordAuthOperation', 'TKTokenSession', 'TKTokenSmartCardPINAuthOperation', 'TKTokenWatcher', 'TWRequest', 'TWTweetComposeViewController', 'UIAcceleration', 'UIAccelerometer', 'UIAccessibilityCustomAction', 'UIAccessibilityCustomRotor', 'UIAccessibilityCustomRotorItemResult', 'UIAccessibilityCustomRotorSearchPredicate', 'UIAccessibilityElement', 'UIAccessibilityLocationDescriptor', 'UIAction', 'UIActionSheet', 'UIActivity', 'UIActivityIndicatorView', 'UIActivityItemProvider', 'UIActivityItemsConfiguration', 'UIActivityViewController', 'UIAlertAction', 'UIAlertController', 'UIAlertView', 'UIApplication', 'UIApplicationShortcutIcon', 'UIApplicationShortcutItem', 'UIAttachmentBehavior', 'UIBackgroundConfiguration', 'UIBarAppearance', 'UIBarButtonItem', 'UIBarButtonItemAppearance', 'UIBarButtonItemGroup', 'UIBarButtonItemStateAppearance', 'UIBarItem', 'UIBezierPath', 'UIBlurEffect', 'UIButton', 'UICellAccessory', 'UICellAccessoryCheckmark', 'UICellAccessoryCustomView', 'UICellAccessoryDelete', 'UICellAccessoryDisclosureIndicator', 'UICellAccessoryInsert', 'UICellAccessoryLabel', 'UICellAccessoryMultiselect', 'UICellAccessoryOutlineDisclosure', 'UICellAccessoryReorder', 'UICellConfigurationState', 'UICloudSharingController', 'UICollectionLayoutListConfiguration', 'UICollectionReusableView', 'UICollectionView', 'UICollectionViewCell', 'UICollectionViewCellRegistration', 'UICollectionViewCompositionalLayout', 'UICollectionViewCompositionalLayoutConfiguration', 'UICollectionViewController', 'UICollectionViewDiffableDataSource', 'UICollectionViewDiffableDataSourceReorderingHandlers', 'UICollectionViewDiffableDataSourceSectionSnapshotHandlers', 'UICollectionViewDropPlaceholder', 'UICollectionViewDropProposal', 'UICollectionViewFlowLayout', 'UICollectionViewFlowLayoutInvalidationContext', 'UICollectionViewFocusUpdateContext', 'UICollectionViewLayout', 'UICollectionViewLayoutAttributes', 'UICollectionViewLayoutInvalidationContext', 'UICollectionViewListCell', 'UICollectionViewPlaceholder', 'UICollectionViewSupplementaryRegistration', 'UICollectionViewTransitionLayout', 'UICollectionViewUpdateItem', 'UICollisionBehavior', 'UIColor', 'UIColorPickerViewController', 'UIColorWell', 'UICommand', 'UICommandAlternate', 'UIContextMenuConfiguration', 'UIContextMenuInteraction', 'UIContextualAction', 'UIControl', 'UICubicTimingParameters', 'UIDatePicker', 'UIDeferredMenuElement', 'UIDevice', 'UIDictationPhrase', 'UIDocument', 'UIDocumentBrowserAction', 'UIDocumentBrowserTransitionController', 'UIDocumentBrowserViewController', 'UIDocumentInteractionController', 
'UIDocumentMenuViewController', 'UIDocumentPickerExtensionViewController', 'UIDocumentPickerViewController', 'UIDragInteraction', 'UIDragItem', 'UIDragPreview', 'UIDragPreviewParameters', 'UIDragPreviewTarget', 'UIDropInteraction', 'UIDropProposal', 'UIDynamicAnimator', 'UIDynamicBehavior', 'UIDynamicItemBehavior', 'UIDynamicItemGroup', 'UIEvent', 'UIFeedbackGenerator', 'UIFieldBehavior', 'UIFocusAnimationCoordinator', 'UIFocusDebugger', 'UIFocusGuide', 'UIFocusMovementHint', 'UIFocusSystem', 'UIFocusUpdateContext', 'UIFont', 'UIFontDescriptor', 'UIFontMetrics', 'UIFontPickerViewController', 'UIFontPickerViewControllerConfiguration', 'UIGestureRecognizer', 'UIGraphicsImageRenderer', 'UIGraphicsImageRendererContext', 'UIGraphicsImageRendererFormat', 'UIGraphicsPDFRenderer', 'UIGraphicsPDFRendererContext', 'UIGraphicsPDFRendererFormat', 'UIGraphicsRenderer', 'UIGraphicsRendererContext', 'UIGraphicsRendererFormat', 'UIGravityBehavior', 'UIHoverGestureRecognizer', 'UIImage', 'UIImageAsset', 'UIImageConfiguration', 'UIImagePickerController', 'UIImageSymbolConfiguration', 'UIImageView', 'UIImpactFeedbackGenerator', 'UIIndirectScribbleInteraction', 'UIInputView', 'UIInputViewController', 'UIInterpolatingMotionEffect', 'UIKey', 'UIKeyCommand', 'UILabel', 'UILargeContentViewerInteraction', 'UILayoutGuide', 'UILexicon', 'UILexiconEntry', 'UIListContentConfiguration', 'UIListContentImageProperties', 'UIListContentTextProperties', 'UIListContentView', 'UILocalNotification', 'UILocalizedIndexedCollation', 'UILongPressGestureRecognizer', 'UIManagedDocument', 'UIMarkupTextPrintFormatter', 'UIMenu', 'UIMenuController', 'UIMenuElement', 'UIMenuItem', 'UIMenuSystem', 'UIMotionEffect', 'UIMotionEffectGroup', 'UIMutableApplicationShortcutItem', 'UIMutableUserNotificationAction', 'UIMutableUserNotificationCategory', 'UINavigationBar', 'UINavigationBarAppearance', 'UINavigationController', 'UINavigationItem', 'UINib', 'UINotificationFeedbackGenerator', 'UIOpenURLContext', 'UIPageControl', 'UIPageViewController', 'UIPanGestureRecognizer', 'UIPasteConfiguration', 'UIPasteboard', 'UIPencilInteraction', 'UIPercentDrivenInteractiveTransition', 'UIPickerView', 'UIPinchGestureRecognizer', 'UIPointerEffect', 'UIPointerHighlightEffect', 'UIPointerHoverEffect', 'UIPointerInteraction', 'UIPointerLiftEffect', 'UIPointerLockState', 'UIPointerRegion', 'UIPointerRegionRequest', 'UIPointerShape', 'UIPointerStyle', 'UIPopoverBackgroundView', 'UIPopoverController', 'UIPopoverPresentationController', 'UIPresentationController', 'UIPress', 'UIPressesEvent', 'UIPreviewAction', 'UIPreviewActionGroup', 'UIPreviewInteraction', 'UIPreviewParameters', 'UIPreviewTarget', 'UIPrintFormatter', 'UIPrintInfo', 'UIPrintInteractionController', 'UIPrintPageRenderer', 'UIPrintPaper', 'UIPrinter', 'UIPrinterPickerController', 'UIProgressView', 'UIPushBehavior', 'UIReferenceLibraryViewController', 'UIRefreshControl', 'UIRegion', 'UIResponder', 'UIRotationGestureRecognizer', 'UIScene', 'UISceneActivationConditions', 'UISceneActivationRequestOptions', 'UISceneConfiguration', 'UISceneConnectionOptions', 'UISceneDestructionRequestOptions', 'UISceneOpenExternalURLOptions', 'UISceneOpenURLOptions', 'UISceneSession', 'UISceneSizeRestrictions', 'UIScreen', 'UIScreenEdgePanGestureRecognizer', 'UIScreenMode', 'UIScreenshotService', 'UIScribbleInteraction', 'UIScrollView', 'UISearchBar', 'UISearchContainerViewController', 'UISearchController', 'UISearchDisplayController', 'UISearchSuggestionItem', 'UISearchTextField', 'UISearchToken', 'UISegmentedControl', 
'UISelectionFeedbackGenerator', 'UISimpleTextPrintFormatter', 'UISlider', 'UISnapBehavior', 'UISplitViewController', 'UISpringLoadedInteraction', 'UISpringTimingParameters', 'UIStackView', 'UIStatusBarManager', 'UIStepper', 'UIStoryboard', 'UIStoryboardPopoverSegue', 'UIStoryboardSegue', 'UIStoryboardUnwindSegueSource', 'UISwipeActionsConfiguration', 'UISwipeGestureRecognizer', 'UISwitch', 'UITabBar', 'UITabBarAppearance', 'UITabBarController', 'UITabBarItem', 'UITabBarItemAppearance', 'UITabBarItemStateAppearance', 'UITableView', 'UITableViewCell', 'UITableViewController', 'UITableViewDiffableDataSource', 'UITableViewDropPlaceholder', 'UITableViewDropProposal', 'UITableViewFocusUpdateContext', 'UITableViewHeaderFooterView', 'UITableViewPlaceholder', 'UITableViewRowAction', 'UITapGestureRecognizer', 'UITargetedDragPreview', 'UITargetedPreview', 'UITextChecker', 'UITextDragPreviewRenderer', 'UITextDropProposal', 'UITextField', 'UITextFormattingCoordinator', 'UITextInputAssistantItem', 'UITextInputMode', 'UITextInputPasswordRules', 'UITextInputStringTokenizer', 'UITextInteraction', 'UITextPlaceholder', 'UITextPosition', 'UITextRange', 'UITextSelectionRect', 'UITextView', 'UITitlebar', 'UIToolbar', 'UIToolbarAppearance', 'UITouch', 'UITraitCollection', 'UIUserNotificationAction', 'UIUserNotificationCategory', 'UIUserNotificationSettings', 'UIVibrancyEffect', 'UIVideoEditorController', 'UIView', 'UIViewConfigurationState', 'UIViewController', 'UIViewPrintFormatter', 'UIViewPropertyAnimator', 'UIVisualEffect', 'UIVisualEffectView', 'UIWebView', 'UIWindow', 'UIWindowScene', 'UIWindowSceneDestructionRequestOptions', 'UNCalendarNotificationTrigger', 'UNLocationNotificationTrigger', 'UNMutableNotificationContent', 'UNNotification', 'UNNotificationAction', 'UNNotificationAttachment', 'UNNotificationCategory', 'UNNotificationContent', 'UNNotificationRequest', 'UNNotificationResponse', 'UNNotificationServiceExtension', 'UNNotificationSettings', 'UNNotificationSound', 'UNNotificationTrigger', 'UNPushNotificationTrigger', 'UNTextInputNotificationAction', 'UNTextInputNotificationResponse', 'UNTimeIntervalNotificationTrigger', 'UNUserNotificationCenter', 'UTType', 'VNBarcodeObservation', 'VNCircle', 'VNClassificationObservation', 'VNClassifyImageRequest', 'VNContour', 'VNContoursObservation', 'VNCoreMLFeatureValueObservation', 'VNCoreMLModel', 'VNCoreMLRequest', 'VNDetectBarcodesRequest', 'VNDetectContoursRequest', 'VNDetectFaceCaptureQualityRequest', 'VNDetectFaceLandmarksRequest', 'VNDetectFaceRectanglesRequest', 'VNDetectHorizonRequest', 'VNDetectHumanBodyPoseRequest', 'VNDetectHumanHandPoseRequest', 'VNDetectHumanRectanglesRequest', 'VNDetectRectanglesRequest', 'VNDetectTextRectanglesRequest', 'VNDetectTrajectoriesRequest', 'VNDetectedObjectObservation', 'VNDetectedPoint', 'VNDocumentCameraScan', 'VNDocumentCameraViewController', 'VNFaceLandmarkRegion', 'VNFaceLandmarkRegion2D', 'VNFaceLandmarks', 'VNFaceLandmarks2D', 'VNFaceObservation', 'VNFeaturePrintObservation', 'VNGenerateAttentionBasedSaliencyImageRequest', 'VNGenerateImageFeaturePrintRequest', 'VNGenerateObjectnessBasedSaliencyImageRequest', 'VNGenerateOpticalFlowRequest', 'VNGeometryUtils', 'VNHomographicImageRegistrationRequest', 'VNHorizonObservation', 'VNHumanBodyPoseObservation', 'VNHumanHandPoseObservation', 'VNImageAlignmentObservation', 'VNImageBasedRequest', 'VNImageHomographicAlignmentObservation', 'VNImageRegistrationRequest', 'VNImageRequestHandler', 'VNImageTranslationAlignmentObservation', 'VNObservation', 
'VNPixelBufferObservation', 'VNPoint', 'VNRecognizeAnimalsRequest', 'VNRecognizeTextRequest', 'VNRecognizedObjectObservation', 'VNRecognizedPoint', 'VNRecognizedPointsObservation', 'VNRecognizedText', 'VNRecognizedTextObservation', 'VNRectangleObservation', 'VNRequest', 'VNSaliencyImageObservation', 'VNSequenceRequestHandler', 'VNStatefulRequest', 'VNTargetedImageRequest', 'VNTextObservation', 'VNTrackObjectRequest', 'VNTrackRectangleRequest', 'VNTrackingRequest', 'VNTrajectoryObservation', 'VNTranslationalImageRegistrationRequest', 'VNVector', 'VNVideoProcessor', 'VNVideoProcessorCadence', 'VNVideoProcessorFrameRateCadence', 'VNVideoProcessorRequestProcessingOptions', 'VNVideoProcessorTimeIntervalCadence', 'VSAccountApplicationProvider', 'VSAccountManager', 'VSAccountManagerResult', 'VSAccountMetadata', 'VSAccountMetadataRequest', 'VSAccountProviderResponse', 'VSSubscription', 'VSSubscriptionRegistrationCenter', 'WCSession', 'WCSessionFile', 'WCSessionFileTransfer', 'WCSessionUserInfoTransfer', 'WKBackForwardList', 'WKBackForwardListItem', 'WKContentRuleList', 'WKContentRuleListStore', 'WKContentWorld', 'WKContextMenuElementInfo', 'WKFindConfiguration', 'WKFindResult', 'WKFrameInfo', 'WKHTTPCookieStore', 'WKNavigation', 'WKNavigationAction', 'WKNavigationResponse', 'WKOpenPanelParameters', 'WKPDFConfiguration', 'WKPreferences', 'WKPreviewElementInfo', 'WKProcessPool', 'WKScriptMessage', 'WKSecurityOrigin', 'WKSnapshotConfiguration', 'WKUserContentController', 'WKUserScript', 'WKWebView', 'WKWebViewConfiguration', 'WKWebpagePreferences', 'WKWebsiteDataRecord', 'WKWebsiteDataStore', 'WKWindowFeatures', '__EntityAccessibilityWrapper'}
-COCOA_PROTOCOLS = {'ABNewPersonViewControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'ABPersonViewControllerDelegate', 'ABUnknownPersonViewControllerDelegate', 'ADActionViewControllerChildInterface', 'ADActionViewControllerInterface', 'ADBannerViewDelegate', 'ADInterstitialAdDelegate', 'AEAssessmentSessionDelegate', 'ARAnchorCopying', 'ARCoachingOverlayViewDelegate', 'ARSCNViewDelegate', 'ARSKViewDelegate', 'ARSessionDelegate', 'ARSessionObserver', 'ARSessionProviding', 'ARTrackable', 'ASAccountAuthenticationModificationControllerDelegate', 'ASAccountAuthenticationModificationControllerPresentationContextProviding', 'ASAuthorizationControllerDelegate', 'ASAuthorizationControllerPresentationContextProviding', 'ASAuthorizationCredential', 'ASAuthorizationProvider', 'ASAuthorizationProviderExtensionAuthorizationRequestHandler', 'ASWebAuthenticationPresentationContextProviding', 'ASWebAuthenticationSessionRequestDelegate', 'ASWebAuthenticationSessionWebBrowserSessionHandling', 'AUAudioUnitFactory', 'AVAssetDownloadDelegate', 'AVAssetResourceLoaderDelegate', 'AVAssetWriterDelegate', 'AVAsynchronousKeyValueLoading', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'AVCaptureDataOutputSynchronizerDelegate', 'AVCaptureDepthDataOutputDelegate', 'AVCaptureFileOutputDelegate', 'AVCaptureFileOutputRecordingDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'AVCapturePhotoCaptureDelegate', 'AVCapturePhotoFileDataRepresentationCustomizer', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'AVContentKeyRecipient', 'AVContentKeySessionDelegate', 'AVFragmentMinding', 'AVPictureInPictureControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'AVPlayerItemMetadataCollectorPushDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'AVPlayerItemOutputPullDelegate', 'AVPlayerItemOutputPushDelegate', 'AVPlayerViewControllerDelegate', 'AVQueuedSampleBufferRendering', 'AVRoutePickerViewDelegate', 'AVVideoCompositing', 'AVVideoCompositionInstruction', 'AVVideoCompositionValidationHandling', 'AXCustomContentProvider', 'CAAction', 'CAAnimationDelegate', 'CALayerDelegate', 'CAMediaTiming', 'CAMetalDrawable', 'CBCentralManagerDelegate', 'CBPeripheralDelegate', 'CBPeripheralManagerDelegate', 'CHHapticAdvancedPatternPlayer', 'CHHapticDeviceCapability', 'CHHapticParameterAttributes', 'CHHapticPatternPlayer', 'CIAccordionFoldTransition', 'CIAffineClamp', 'CIAffineTile', 'CIAreaAverage', 'CIAreaHistogram', 'CIAreaMaximum', 'CIAreaMaximumAlpha', 'CIAreaMinMax', 'CIAreaMinMaxRed', 'CIAreaMinimum', 'CIAreaMinimumAlpha', 'CIAreaReductionFilter', 'CIAttributedTextImageGenerator', 'CIAztecCodeGenerator', 'CIBarcodeGenerator', 'CIBarsSwipeTransition', 'CIBicubicScaleTransform', 'CIBlendWithMask', 'CIBloom', 'CIBokehBlur', 'CIBoxBlur', 'CIBumpDistortion', 'CIBumpDistortionLinear', 'CICMYKHalftone', 'CICheckerboardGenerator', 'CICircleSplashDistortion', 'CICircularScreen', 'CICircularWrap', 'CICode128BarcodeGenerator', 'CIColorAbsoluteDifference', 'CIColorClamp', 'CIColorControls', 'CIColorCrossPolynomial', 'CIColorCube', 'CIColorCubeWithColorSpace', 'CIColorCubesMixedWithMask', 'CIColorCurves', 'CIColorInvert', 'CIColorMap', 'CIColorMatrix', 'CIColorMonochrome', 'CIColorPolynomial', 'CIColorPosterize', 'CIColorThreshold', 'CIColorThresholdOtsu', 'CIColumnAverage', 'CIComicEffect', 'CICompositeOperation', 'CIConvolution', 'CICopyMachineTransition', 'CICoreMLModel', 'CICrystallize', 'CIDepthOfField', 'CIDepthToDisparity', 'CIDiscBlur', 'CIDisintegrateWithMaskTransition', 'CIDisparityToDepth', 
'CIDisplacementDistortion', 'CIDissolveTransition', 'CIDither', 'CIDocumentEnhancer', 'CIDotScreen', 'CIDroste', 'CIEdgePreserveUpsample', 'CIEdgeWork', 'CIEdges', 'CIEightfoldReflectedTile', 'CIExposureAdjust', 'CIFalseColor', 'CIFilter', 'CIFilterConstructor', 'CIFlashTransition', 'CIFourCoordinateGeometryFilter', 'CIFourfoldReflectedTile', 'CIFourfoldRotatedTile', 'CIFourfoldTranslatedTile', 'CIGaborGradients', 'CIGammaAdjust', 'CIGaussianBlur', 'CIGaussianGradient', 'CIGlassDistortion', 'CIGlassLozenge', 'CIGlideReflectedTile', 'CIGloom', 'CIHatchedScreen', 'CIHeightFieldFromMask', 'CIHexagonalPixellate', 'CIHighlightShadowAdjust', 'CIHistogramDisplay', 'CIHoleDistortion', 'CIHueAdjust', 'CIHueSaturationValueGradient', 'CIImageProcessorInput', 'CIImageProcessorOutput', 'CIKMeans', 'CIKaleidoscope', 'CIKeystoneCorrectionCombined', 'CIKeystoneCorrectionHorizontal', 'CIKeystoneCorrectionVertical', 'CILabDeltaE', 'CILanczosScaleTransform', 'CILenticularHaloGenerator', 'CILightTunnel', 'CILineOverlay', 'CILineScreen', 'CILinearGradient', 'CILinearToSRGBToneCurve', 'CIMaskToAlpha', 'CIMaskedVariableBlur', 'CIMaximumComponent', 'CIMedian', 'CIMeshGenerator', 'CIMinimumComponent', 'CIMix', 'CIModTransition', 'CIMorphologyGradient', 'CIMorphologyMaximum', 'CIMorphologyMinimum', 'CIMorphologyRectangleMaximum', 'CIMorphologyRectangleMinimum', 'CIMotionBlur', 'CINinePartStretched', 'CINinePartTiled', 'CINoiseReduction', 'CIOpTile', 'CIPDF417BarcodeGenerator', 'CIPageCurlTransition', 'CIPageCurlWithShadowTransition', 'CIPaletteCentroid', 'CIPalettize', 'CIParallelogramTile', 'CIPerspectiveCorrection', 'CIPerspectiveRotate', 'CIPerspectiveTile', 'CIPerspectiveTransform', 'CIPerspectiveTransformWithExtent', 'CIPhotoEffect', 'CIPinchDistortion', 'CIPixellate', 'CIPlugInRegistration', 'CIPointillize', 'CIQRCodeGenerator', 'CIRadialGradient', 'CIRandomGenerator', 'CIRippleTransition', 'CIRoundedRectangleGenerator', 'CIRowAverage', 'CISRGBToneCurveToLinear', 'CISaliencyMap', 'CISepiaTone', 'CIShadedMaterial', 'CISharpenLuminance', 'CISixfoldReflectedTile', 'CISixfoldRotatedTile', 'CISmoothLinearGradient', 'CISpotColor', 'CISpotLight', 'CIStarShineGenerator', 'CIStraighten', 'CIStretchCrop', 'CIStripesGenerator', 'CISunbeamsGenerator', 'CISwipeTransition', 'CITemperatureAndTint', 'CITextImageGenerator', 'CIThermal', 'CIToneCurve', 'CITorusLensDistortion', 'CITransitionFilter', 'CITriangleKaleidoscope', 'CITriangleTile', 'CITwelvefoldReflectedTile', 'CITwirlDistortion', 'CIUnsharpMask', 'CIVibrance', 'CIVignette', 'CIVignetteEffect', 'CIVortexDistortion', 'CIWhitePointAdjust', 'CIXRay', 'CIZoomBlur', 'CKRecordKeyValueSetting', 'CKRecordValue', 'CLKComplicationDataSource', 'CLLocationManagerDelegate', 'CLSContextProvider', 'CLSDataStoreDelegate', 'CMFallDetectionDelegate', 'CMHeadphoneMotionManagerDelegate', 'CNChangeHistoryEventVisitor', 'CNContactPickerDelegate', 'CNContactViewControllerDelegate', 'CNKeyDescriptor', 'CPApplicationDelegate', 'CPBarButtonProviding', 'CPInterfaceControllerDelegate', 'CPListTemplateDelegate', 'CPListTemplateItem', 'CPMapTemplateDelegate', 'CPNowPlayingTemplateObserver', 'CPPointOfInterestTemplateDelegate', 'CPSearchTemplateDelegate', 'CPSelectableListItem', 'CPSessionConfigurationDelegate', 'CPTabBarTemplateDelegate', 'CPTemplateApplicationDashboardSceneDelegate', 'CPTemplateApplicationSceneDelegate', 'CSSearchableIndexDelegate', 'CTSubscriberDelegate', 'CTTelephonyNetworkInfoDelegate', 'CXCallDirectoryExtensionContextDelegate', 'CXCallObserverDelegate', 'CXProviderDelegate', 
'EAAccessoryDelegate', 'EAGLDrawable', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'EKCalendarChooserDelegate', 'EKEventEditViewDelegate', 'EKEventViewDelegate', 'GCDevice', 'GKAchievementViewControllerDelegate', 'GKAgentDelegate', 'GKChallengeEventHandlerDelegate', 'GKChallengeListener', 'GKFriendRequestComposeViewControllerDelegate', 'GKGameCenterControllerDelegate', 'GKGameModel', 'GKGameModelPlayer', 'GKGameModelUpdate', 'GKGameSessionEventListener', 'GKGameSessionSharingViewControllerDelegate', 'GKInviteEventListener', 'GKLeaderboardViewControllerDelegate', 'GKLocalPlayerListener', 'GKMatchDelegate', 'GKMatchmakerViewControllerDelegate', 'GKPeerPickerControllerDelegate', 'GKRandom', 'GKSavedGameListener', 'GKSceneRootNodeType', 'GKSessionDelegate', 'GKStrategist', 'GKTurnBasedEventListener', 'GKTurnBasedMatchmakerViewControllerDelegate', 'GKVoiceChatClient', 'GLKNamedEffect', 'GLKViewControllerDelegate', 'GLKViewDelegate', 'HKLiveWorkoutBuilderDelegate', 'HKWorkoutSessionDelegate', 'HMAccessoryBrowserDelegate', 'HMAccessoryDelegate', 'HMCameraSnapshotControlDelegate', 'HMCameraStreamControlDelegate', 'HMHomeDelegate', 'HMHomeManagerDelegate', 'HMNetworkConfigurationProfileDelegate', 'ICCameraDeviceDelegate', 'ICCameraDeviceDownloadDelegate', 'ICDeviceBrowserDelegate', 'ICDeviceDelegate', 'ICScannerDeviceDelegate', 'ILMessageFilterQueryHandling', 'INActivateCarSignalIntentHandling', 'INAddMediaIntentHandling', 'INAddTasksIntentHandling', 'INAppendToNoteIntentHandling', 'INBookRestaurantReservationIntentHandling', 'INCallsDomainHandling', 'INCancelRideIntentHandling', 'INCancelWorkoutIntentHandling', 'INCarCommandsDomainHandling', 'INCarPlayDomainHandling', 'INCreateNoteIntentHandling', 'INCreateTaskListIntentHandling', 'INDeleteTasksIntentHandling', 'INEndWorkoutIntentHandling', 'INGetAvailableRestaurantReservationBookingDefaultsIntentHandling', 'INGetAvailableRestaurantReservationBookingsIntentHandling', 'INGetCarLockStatusIntentHandling', 'INGetCarPowerLevelStatusIntentHandling', 'INGetCarPowerLevelStatusIntentResponseObserver', 'INGetRestaurantGuestIntentHandling', 'INGetRideStatusIntentHandling', 'INGetRideStatusIntentResponseObserver', 'INGetUserCurrentRestaurantReservationBookingsIntentHandling', 'INGetVisualCodeIntentHandling', 'INIntentHandlerProviding', 'INListCarsIntentHandling', 'INListRideOptionsIntentHandling', 'INMessagesDomainHandling', 'INNotebookDomainHandling', 'INPauseWorkoutIntentHandling', 'INPayBillIntentHandling', 'INPaymentsDomainHandling', 'INPhotosDomainHandling', 'INPlayMediaIntentHandling', 'INRadioDomainHandling', 'INRequestPaymentIntentHandling', 'INRequestRideIntentHandling', 'INResumeWorkoutIntentHandling', 'INRidesharingDomainHandling', 'INSaveProfileInCarIntentHandling', 'INSearchCallHistoryIntentHandling', 'INSearchForAccountsIntentHandling', 'INSearchForBillsIntentHandling', 'INSearchForMediaIntentHandling', 'INSearchForMessagesIntentHandling', 'INSearchForNotebookItemsIntentHandling', 'INSearchForPhotosIntentHandling', 'INSendMessageIntentHandling', 'INSendPaymentIntentHandling', 'INSendRideFeedbackIntentHandling', 'INSetAudioSourceInCarIntentHandling', 'INSetCarLockStatusIntentHandling', 'INSetClimateSettingsInCarIntentHandling', 'INSetDefrosterSettingsInCarIntentHandling', 'INSetMessageAttributeIntentHandling', 'INSetProfileInCarIntentHandling', 'INSetRadioStationIntentHandling', 'INSetSeatSettingsInCarIntentHandling', 'INSetTaskAttributeIntentHandling', 'INSnoozeTasksIntentHandling', 'INSpeakable', 'INStartAudioCallIntentHandling', 
'INStartCallIntentHandling', 'INStartPhotoPlaybackIntentHandling', 'INStartVideoCallIntentHandling', 'INStartWorkoutIntentHandling', 'INTransferMoneyIntentHandling', 'INUIAddVoiceShortcutButtonDelegate', 'INUIAddVoiceShortcutViewControllerDelegate', 'INUIEditVoiceShortcutViewControllerDelegate', 'INUIHostedViewControlling', 'INUIHostedViewSiriProviding', 'INUpdateMediaAffinityIntentHandling', 'INVisualCodeDomainHandling', 'INWorkoutsDomainHandling', 'JSExport', 'MCAdvertiserAssistantDelegate', 'MCBrowserViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MCNearbyServiceBrowserDelegate', 'MCSessionDelegate', 'MDLAssetResolver', 'MDLComponent', 'MDLJointAnimation', 'MDLLightProbeIrradianceDataSource', 'MDLMeshBuffer', 'MDLMeshBufferAllocator', 'MDLMeshBufferZone', 'MDLNamed', 'MDLObjectContainerComponent', 'MDLTransformComponent', 'MDLTransformOp', 'MFMailComposeViewControllerDelegate', 'MFMessageComposeViewControllerDelegate', 'MIDICIProfileResponderDelegate', 'MKAnnotation', 'MKGeoJSONObject', 'MKLocalSearchCompleterDelegate', 'MKMapViewDelegate', 'MKOverlay', 'MKReverseGeocoderDelegate', 'MLBatchProvider', 'MLCustomLayer', 'MLCustomModel', 'MLFeatureProvider', 'MLWritable', 'MPMediaPickerControllerDelegate', 'MPMediaPlayback', 'MPNowPlayingSessionDelegate', 'MPPlayableContentDataSource', 'MPPlayableContentDelegate', 'MPSystemMusicPlayerController', 'MSAuthenticationPresentationContext', 'MSMessagesAppTranscriptPresentation', 'MSStickerBrowserViewDataSource', 'MTKViewDelegate', 'MTLAccelerationStructure', 'MTLAccelerationStructureCommandEncoder', 'MTLArgumentEncoder', 'MTLBinaryArchive', 'MTLBlitCommandEncoder', 'MTLBuffer', 'MTLCaptureScope', 'MTLCommandBuffer', 'MTLCommandBufferEncoderInfo', 'MTLCommandEncoder', 'MTLCommandQueue', 'MTLComputeCommandEncoder', 'MTLComputePipelineState', 'MTLCounter', 'MTLCounterSampleBuffer', 'MTLCounterSet', 'MTLDepthStencilState', 'MTLDevice', 'MTLDrawable', 'MTLDynamicLibrary', 'MTLEvent', 'MTLFence', 'MTLFunction', 'MTLFunctionHandle', 'MTLFunctionLog', 'MTLFunctionLogDebugLocation', 'MTLHeap', 'MTLIndirectCommandBuffer', 'MTLIndirectComputeCommand', 'MTLIndirectComputeCommandEncoder', 'MTLIndirectRenderCommand', 'MTLIndirectRenderCommandEncoder', 'MTLIntersectionFunctionTable', 'MTLLibrary', 'MTLLogContainer', 'MTLParallelRenderCommandEncoder', 'MTLRasterizationRateMap', 'MTLRenderCommandEncoder', 'MTLRenderPipelineState', 'MTLResource', 'MTLResourceStateCommandEncoder', 'MTLSamplerState', 'MTLSharedEvent', 'MTLTexture', 'MTLVisibleFunctionTable', 'MXMetricManagerSubscriber', 'MyClassJavaScriptMethods', 'NCWidgetProviding', 'NEAppPushDelegate', 'NFCFeliCaTag', 'NFCISO15693Tag', 'NFCISO7816Tag', 'NFCMiFareTag', 'NFCNDEFReaderSessionDelegate', 'NFCNDEFTag', 'NFCReaderSession', 'NFCReaderSessionDelegate', 'NFCTag', 'NFCTagReaderSessionDelegate', 'NFCVASReaderSessionDelegate', 'NISessionDelegate', 'NSCacheDelegate', 'NSCoding', 'NSCollectionLayoutContainer', 'NSCollectionLayoutEnvironment', 'NSCollectionLayoutVisibleItem', 'NSCopying', 'NSDecimalNumberBehaviors', 'NSDiscardableContent', 'NSExtensionRequestHandling', 'NSFastEnumeration', 'NSFetchRequestResult', 'NSFetchedResultsControllerDelegate', 'NSFetchedResultsSectionInfo', 'NSFileManagerDelegate', 'NSFilePresenter', 'NSFileProviderChangeObserver', 'NSFileProviderEnumerationObserver', 'NSFileProviderEnumerator', 'NSFileProviderItem', 'NSFileProviderServiceSource', 'NSItemProviderReading', 'NSItemProviderWriting', 'NSKeyedArchiverDelegate', 'NSKeyedUnarchiverDelegate', 
'NSLayoutManagerDelegate', 'NSLocking', 'NSMachPortDelegate', 'NSMetadataQueryDelegate', 'NSMutableCopying', 'NSNetServiceBrowserDelegate', 'NSNetServiceDelegate', 'NSPortDelegate', 'NSProgressReporting', 'NSSecureCoding', 'NSStreamDelegate', 'NSTextAttachmentContainer', 'NSTextLayoutOrientationProvider', 'NSTextStorageDelegate', 'NSURLAuthenticationChallengeSender', 'NSURLConnectionDataDelegate', 'NSURLConnectionDelegate', 'NSURLConnectionDownloadDelegate', 'NSURLProtocolClient', 'NSURLSessionDataDelegate', 'NSURLSessionDelegate', 'NSURLSessionDownloadDelegate', 'NSURLSessionStreamDelegate', 'NSURLSessionTaskDelegate', 'NSURLSessionWebSocketDelegate', 'NSUserActivityDelegate', 'NSXMLParserDelegate', 'NSXPCListenerDelegate', 'NSXPCProxyCreating', 'NWTCPConnectionAuthenticationDelegate', 'OSLogEntryFromProcess', 'OSLogEntryWithPayload', 'PDFDocumentDelegate', 'PDFViewDelegate', 'PHContentEditingController', 'PHLivePhotoFrame', 'PHLivePhotoViewDelegate', 'PHPhotoLibraryAvailabilityObserver', 'PHPhotoLibraryChangeObserver', 'PHPickerViewControllerDelegate', 'PKAddPassesViewControllerDelegate', 'PKAddPaymentPassViewControllerDelegate', 'PKAddSecureElementPassViewControllerDelegate', 'PKCanvasViewDelegate', 'PKDisbursementAuthorizationControllerDelegate', 'PKIssuerProvisioningExtensionAuthorizationProviding', 'PKPaymentAuthorizationControllerDelegate', 'PKPaymentAuthorizationViewControllerDelegate', 'PKPaymentInformationRequestHandling', 'PKPushRegistryDelegate', 'PKToolPickerObserver', 'PreviewDisplaying', 'QLPreviewControllerDataSource', 'QLPreviewControllerDelegate', 'QLPreviewItem', 'QLPreviewingController', 'RPBroadcastActivityControllerDelegate', 'RPBroadcastActivityViewControllerDelegate', 'RPBroadcastControllerDelegate', 'RPPreviewViewControllerDelegate', 'RPScreenRecorderDelegate', 'SCNActionable', 'SCNAnimatable', 'SCNAnimation', 'SCNAvoidOccluderConstraintDelegate', 'SCNBoundingVolume', 'SCNBufferStream', 'SCNCameraControlConfiguration', 'SCNCameraControllerDelegate', 'SCNNodeRendererDelegate', 'SCNPhysicsContactDelegate', 'SCNProgramDelegate', 'SCNSceneExportDelegate', 'SCNSceneRenderer', 'SCNSceneRendererDelegate', 'SCNShadable', 'SCNTechniqueSupport', 'SFSafariViewControllerDelegate', 'SFSpeechRecognitionTaskDelegate', 'SFSpeechRecognizerDelegate', 'SKCloudServiceSetupViewControllerDelegate', 'SKOverlayDelegate', 'SKPaymentQueueDelegate', 'SKPaymentTransactionObserver', 'SKPhysicsContactDelegate', 'SKProductsRequestDelegate', 'SKRequestDelegate', 'SKSceneDelegate', 'SKStoreProductViewControllerDelegate', 'SKViewDelegate', 'SKWarpable', 'SNRequest', 'SNResult', 'SNResultsObserving', 'SRSensorReaderDelegate', 'TKSmartCardTokenDriverDelegate', 'TKSmartCardUserInteractionDelegate', 'TKTokenDelegate', 'TKTokenDriverDelegate', 'TKTokenSessionDelegate', 'UIAccelerometerDelegate', 'UIAccessibilityContainerDataTable', 'UIAccessibilityContainerDataTableCell', 'UIAccessibilityContentSizeCategoryImageAdjusting', 'UIAccessibilityIdentification', 'UIAccessibilityReadingContent', 'UIActionSheetDelegate', 'UIActivityItemSource', 'UIActivityItemsConfigurationReading', 'UIAdaptivePresentationControllerDelegate', 'UIAlertViewDelegate', 'UIAppearance', 'UIAppearanceContainer', 'UIApplicationDelegate', 'UIBarPositioning', 'UIBarPositioningDelegate', 'UICloudSharingControllerDelegate', 'UICollectionViewDataSource', 'UICollectionViewDataSourcePrefetching', 'UICollectionViewDelegate', 'UICollectionViewDelegateFlowLayout', 'UICollectionViewDragDelegate', 'UICollectionViewDropCoordinator', 
'UICollectionViewDropDelegate', 'UICollectionViewDropItem', 'UICollectionViewDropPlaceholderContext', 'UICollisionBehaviorDelegate', 'UIColorPickerViewControllerDelegate', 'UIConfigurationState', 'UIContentConfiguration', 'UIContentContainer', 'UIContentSizeCategoryAdjusting', 'UIContentView', 'UIContextMenuInteractionAnimating', 'UIContextMenuInteractionCommitAnimating', 'UIContextMenuInteractionDelegate', 'UICoordinateSpace', 'UIDataSourceModelAssociation', 'UIDataSourceTranslating', 'UIDocumentBrowserViewControllerDelegate', 'UIDocumentInteractionControllerDelegate', 'UIDocumentMenuDelegate', 'UIDocumentPickerDelegate', 'UIDragAnimating', 'UIDragDropSession', 'UIDragInteractionDelegate', 'UIDragSession', 'UIDropInteractionDelegate', 'UIDropSession', 'UIDynamicAnimatorDelegate', 'UIDynamicItem', 'UIFocusAnimationContext', 'UIFocusDebuggerOutput', 'UIFocusEnvironment', 'UIFocusItem', 'UIFocusItemContainer', 'UIFocusItemScrollableContainer', 'UIFontPickerViewControllerDelegate', 'UIGestureRecognizerDelegate', 'UIGuidedAccessRestrictionDelegate', 'UIImageConfiguration', 'UIImagePickerControllerDelegate', 'UIIndirectScribbleInteractionDelegate', 'UIInputViewAudioFeedback', 'UIInteraction', 'UIItemProviderPresentationSizeProviding', 'UIKeyInput', 'UILargeContentViewerInteractionDelegate', 'UILargeContentViewerItem', 'UILayoutSupport', 'UIMenuBuilder', 'UINavigationBarDelegate', 'UINavigationControllerDelegate', 'UIObjectRestoration', 'UIPageViewControllerDataSource', 'UIPageViewControllerDelegate', 'UIPasteConfigurationSupporting', 'UIPencilInteractionDelegate', 'UIPickerViewAccessibilityDelegate', 'UIPickerViewDataSource', 'UIPickerViewDelegate', 'UIPointerInteractionAnimating', 'UIPointerInteractionDelegate', 'UIPopoverBackgroundViewMethods', 'UIPopoverControllerDelegate', 'UIPopoverPresentationControllerDelegate', 'UIPreviewActionItem', 'UIPreviewInteractionDelegate', 'UIPrintInteractionControllerDelegate', 'UIPrinterPickerControllerDelegate', 'UIResponderStandardEditActions', 'UISceneDelegate', 'UIScreenshotServiceDelegate', 'UIScribbleInteractionDelegate', 'UIScrollViewAccessibilityDelegate', 'UIScrollViewDelegate', 'UISearchBarDelegate', 'UISearchControllerDelegate', 'UISearchDisplayDelegate', 'UISearchResultsUpdating', 'UISearchSuggestion', 'UISearchTextFieldDelegate', 'UISearchTextFieldPasteItem', 'UISplitViewControllerDelegate', 'UISpringLoadedInteractionBehavior', 'UISpringLoadedInteractionContext', 'UISpringLoadedInteractionEffect', 'UISpringLoadedInteractionSupporting', 'UIStateRestoring', 'UITabBarControllerDelegate', 'UITabBarDelegate', 'UITableViewDataSource', 'UITableViewDataSourcePrefetching', 'UITableViewDelegate', 'UITableViewDragDelegate', 'UITableViewDropCoordinator', 'UITableViewDropDelegate', 'UITableViewDropItem', 'UITableViewDropPlaceholderContext', 'UITextDocumentProxy', 'UITextDragDelegate', 'UITextDragRequest', 'UITextDraggable', 'UITextDropDelegate', 'UITextDropRequest', 'UITextDroppable', 'UITextFieldDelegate', 'UITextFormattingCoordinatorDelegate', 'UITextInput', 'UITextInputDelegate', 'UITextInputTokenizer', 'UITextInputTraits', 'UITextInteractionDelegate', 'UITextPasteConfigurationSupporting', 'UITextPasteDelegate', 'UITextPasteItem', 'UITextSelecting', 'UITextViewDelegate', 'UITimingCurveProvider', 'UIToolbarDelegate', 'UITraitEnvironment', 'UIUserActivityRestoring', 'UIVideoEditorControllerDelegate', 'UIViewAnimating', 'UIViewControllerAnimatedTransitioning', 'UIViewControllerContextTransitioning', 'UIViewControllerInteractiveTransitioning', 
'UIViewControllerPreviewing', 'UIViewControllerPreviewingDelegate', 'UIViewControllerRestoration', 'UIViewControllerTransitionCoordinator', 'UIViewControllerTransitionCoordinatorContext', 'UIViewControllerTransitioningDelegate', 'UIViewImplicitlyAnimating', 'UIWebViewDelegate', 'UIWindowSceneDelegate', 'UNNotificationContentExtension', 'UNUserNotificationCenterDelegate', 'VNDocumentCameraViewControllerDelegate', 'VNFaceObservationAccepting', 'VNRequestProgressProviding', 'VNRequestRevisionProviding', 'VSAccountManagerDelegate', 'WCSessionDelegate', 'WKHTTPCookieStoreObserver', 'WKNavigationDelegate', 'WKPreviewActionItem', 'WKScriptMessageHandler', 'WKScriptMessageHandlerWithReply', 'WKUIDelegate', 'WKURLSchemeHandler', 'WKURLSchemeTask'}
-COCOA_PRIMITIVES = {'ACErrorCode', 'ALCcontext_struct', 'ALCdevice_struct', 'ALMXGlyphEntry', 'ALMXHeader', 'API_UNAVAILABLE', 'AUChannelInfo', 'AUDependentParameter', 'AUDistanceAttenuationData', 'AUHostIdentifier', 'AUHostVersionIdentifier', 'AUInputSamplesInOutputCallbackStruct', 'AUMIDIEvent', 'AUMIDIOutputCallbackStruct', 'AUNodeInteraction', 'AUNodeRenderCallback', 'AUNumVersion', 'AUParameterAutomationEvent', 'AUParameterEvent', 'AUParameterMIDIMapping', 'AUPreset', 'AUPresetEvent', 'AURecordedParameterEvent', 'AURenderCallbackStruct', 'AURenderEventHeader', 'AUSamplerBankPresetData', 'AUSamplerInstrumentData', 'AnchorPoint', 'AnchorPointTable', 'AnkrTable', 'AudioBalanceFade', 'AudioBuffer', 'AudioBufferList', 'AudioBytePacketTranslation', 'AudioChannelDescription', 'AudioChannelLayout', 'AudioClassDescription', 'AudioCodecMagicCookieInfo', 'AudioCodecPrimeInfo', 'AudioComponentDescription', 'AudioComponentPlugInInterface', 'AudioConverterPrimeInfo', 'AudioFileMarker', 'AudioFileMarkerList', 'AudioFilePacketTableInfo', 'AudioFileRegion', 'AudioFileRegionList', 'AudioFileTypeAndFormatID', 'AudioFile_SMPTE_Time', 'AudioFormatInfo', 'AudioFormatListItem', 'AudioFramePacketTranslation', 'AudioIndependentPacketTranslation', 'AudioOutputUnitMIDICallbacks', 'AudioOutputUnitStartAtTimeParams', 'AudioPacketDependencyInfoTranslation', 'AudioPacketRangeByteCountTranslation', 'AudioPacketRollDistanceTranslation', 'AudioPanningInfo', 'AudioQueueBuffer', 'AudioQueueChannelAssignment', 'AudioQueueLevelMeterState', 'AudioQueueParameterEvent', 'AudioStreamBasicDescription', 'AudioStreamPacketDescription', 'AudioTimeStamp', 'AudioUnitCocoaViewInfo', 'AudioUnitConnection', 'AudioUnitExternalBuffer', 'AudioUnitFrequencyResponseBin', 'AudioUnitMIDIControlMapping', 'AudioUnitMeterClipping', 'AudioUnitNodeConnection', 'AudioUnitOtherPluginDesc', 'AudioUnitParameter', 'AudioUnitParameterEvent', 'AudioUnitParameterHistoryInfo', 'AudioUnitParameterInfo', 'AudioUnitParameterNameInfo', 'AudioUnitParameterStringFromValue', 'AudioUnitParameterValueFromString', 'AudioUnitParameterValueName', 'AudioUnitParameterValueTranslation', 'AudioUnitPresetMAS_SettingData', 'AudioUnitPresetMAS_Settings', 'AudioUnitProperty', 'AudioUnitRenderContext', 'AudioValueRange', 'AudioValueTranslation', 'AuthorizationOpaqueRef', 'BslnFormat0Part', 'BslnFormat1Part', 'BslnFormat2Part', 'BslnFormat3Part', 'BslnTable', 'CABarBeatTime', 'CAFAudioDescription', 'CAFChunkHeader', 'CAFDataChunk', 'CAFFileHeader', 'CAFInfoStrings', 'CAFInstrumentChunk', 'CAFMarker', 'CAFMarkerChunk', 'CAFOverviewChunk', 'CAFOverviewSample', 'CAFPacketTableHeader', 'CAFPeakChunk', 'CAFPositionPeak', 'CAFRegion', 'CAFRegionChunk', 'CAFStringID', 'CAFStrings', 'CAFUMIDChunk', 'CAF_SMPTE_Time', 'CAF_UUID_ChunkHeader', 'CA_BOXABLE', 'CFHostClientContext', 'CFNetServiceClientContext', 'CF_BRIDGED_MUTABLE_TYPE', 'CF_BRIDGED_TYPE', 'CF_RELATED_TYPE', 'CGAffineTransform', 'CGDataConsumerCallbacks', 'CGDataProviderDirectCallbacks', 'CGDataProviderSequentialCallbacks', 'CGFunctionCallbacks', 'CGPDFArray', 'CGPDFContentStream', 'CGPDFDictionary', 'CGPDFObject', 'CGPDFOperatorTable', 'CGPDFScanner', 'CGPDFStream', 'CGPDFString', 'CGPathElement', 'CGPatternCallbacks', 'CGVector', 'CG_BOXABLE', 'CLLocationCoordinate2D', 'CM_BRIDGED_TYPE', 'CTParagraphStyleSetting', 'CVPlanarComponentInfo', 'CVPlanarPixelBufferInfo', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'CVSMPTETime', 'CV_BRIDGED_TYPE', 'ComponentInstanceRecord', 
'ExtendedAudioFormatInfo', 'ExtendedControlEvent', 'ExtendedNoteOnEvent', 'ExtendedTempoEvent', 'FontVariation', 'GCQuaternion', 'GKBox', 'GKQuad', 'GKTriangle', 'GLKEffectPropertyPrv', 'HostCallbackInfo', 'IIO_BRIDGED_TYPE', 'IUnknownVTbl', 'JustDirectionTable', 'JustPCAction', 'JustPCActionSubrecord', 'JustPCConditionalAddAction', 'JustPCDecompositionAction', 'JustPCDuctilityAction', 'JustPCGlyphRepeatAddAction', 'JustPostcompTable', 'JustTable', 'JustWidthDeltaEntry', 'JustWidthDeltaGroup', 'KernIndexArrayHeader', 'KernKerningPair', 'KernOffsetTable', 'KernOrderedListEntry', 'KernOrderedListHeader', 'KernSimpleArrayHeader', 'KernStateEntry', 'KernStateHeader', 'KernSubtableHeader', 'KernTableHeader', 'KernVersion0Header', 'KernVersion0SubtableHeader', 'KerxAnchorPointAction', 'KerxControlPointAction', 'KerxControlPointEntry', 'KerxControlPointHeader', 'KerxCoordinateAction', 'KerxIndexArrayHeader', 'KerxKerningPair', 'KerxOrderedListEntry', 'KerxOrderedListHeader', 'KerxSimpleArrayHeader', 'KerxStateEntry', 'KerxStateHeader', 'KerxSubtableHeader', 'KerxTableHeader', 'LcarCaretClassEntry', 'LcarCaretTable', 'LtagStringRange', 'LtagTable', 'MDL_CLASS_EXPORT', 'MIDICIDeviceIdentification', 'MIDIChannelMessage', 'MIDIControlTransform', 'MIDIDriverInterface', 'MIDIEventList', 'MIDIEventPacket', 'MIDIIOErrorNotification', 'MIDIMessage_128', 'MIDIMessage_64', 'MIDIMessage_96', 'MIDIMetaEvent', 'MIDINoteMessage', 'MIDINotification', 'MIDIObjectAddRemoveNotification', 'MIDIObjectPropertyChangeNotification', 'MIDIPacket', 'MIDIPacketList', 'MIDIRawData', 'MIDISysexSendRequest', 'MIDIThruConnectionEndpoint', 'MIDIThruConnectionParams', 'MIDITransform', 'MIDIValueMap', 'MPSDeviceOptions', 'MixerDistanceParams', 'MortChain', 'MortContextualSubtable', 'MortFeatureEntry', 'MortInsertionSubtable', 'MortLigatureSubtable', 'MortRearrangementSubtable', 'MortSubtable', 'MortSwashSubtable', 'MortTable', 'MorxChain', 'MorxContextualSubtable', 'MorxInsertionSubtable', 'MorxLigatureSubtable', 'MorxRearrangementSubtable', 'MorxSubtable', 'MorxTable', 'MusicDeviceNoteParams', 'MusicDeviceStdNoteParams', 'MusicEventUserData', 'MusicTrackLoopInfo', 'NoteParamsControlValue', 'OpaqueAudioComponent', 'OpaqueAudioComponentInstance', 'OpaqueAudioConverter', 'OpaqueAudioQueue', 'OpaqueAudioQueueProcessingTap', 'OpaqueAudioQueueTimeline', 'OpaqueExtAudioFile', 'OpaqueJSClass', 'OpaqueJSContext', 'OpaqueJSContextGroup', 'OpaqueJSPropertyNameAccumulator', 'OpaqueJSPropertyNameArray', 'OpaqueJSString', 'OpaqueJSValue', 'OpaqueMusicEventIterator', 'OpaqueMusicPlayer', 'OpaqueMusicSequence', 'OpaqueMusicTrack', 'OpbdSideValues', 'OpbdTable', 'ParameterEvent', 'PropLookupSegment', 'PropLookupSingle', 'PropTable', 'ROTAGlyphEntry', 'ROTAHeader', 'SCNMatrix4', 'SCNVector3', 'SCNVector4', 'SFNTLookupArrayHeader', 'SFNTLookupBinarySearchHeader', 'SFNTLookupSegment', 'SFNTLookupSegmentHeader', 'SFNTLookupSingle', 'SFNTLookupSingleHeader', 'SFNTLookupTable', 'SFNTLookupTrimmedArrayHeader', 'SFNTLookupVectorHeader', 'SMPTETime', 'STClassTable', 'STEntryOne', 'STEntryTwo', 'STEntryZero', 'STHeader', 'STXEntryOne', 'STXEntryTwo', 'STXEntryZero', 'STXHeader', 'ScheduledAudioFileRegion', 'ScheduledAudioSlice', 'SecKeychainAttribute', 'SecKeychainAttributeInfo', 'SecKeychainAttributeList', 'TrakTable', 'TrakTableData', 'TrakTableEntry', 'UIAccessibility', 'VTDecompressionOutputCallbackRecord', 'VTInt32Point', 'VTInt32Size', '_CFHTTPAuthentication', '_GLKMatrix2', '_GLKMatrix3', '_GLKMatrix4', '_GLKQuaternion', '_GLKVector2', 
'_GLKVector3', '_GLKVector4', '_GLKVertexAttributeParameters', '_MTLAxisAlignedBoundingBox', '_MTLPackedFloat3', '_MTLPackedFloat4x3', '_NSRange', '_NSZone', '__CFHTTPMessage', '__CFHost', '__CFNetDiagnostic', '__CFNetService', '__CFNetServiceBrowser', '__CFNetServiceMonitor', '__CFXMLNode', '__CFXMLParser', '__GLsync', '__SecAccess', '__SecCertificate', '__SecIdentity', '__SecKey', '__SecRandom', '__attribute__', 'gss_OID_desc_struct', 'gss_OID_set_desc_struct', 'gss_auth_identity', 'gss_buffer_desc_struct', 'gss_buffer_set_desc_struct', 'gss_channel_bindings_struct', 'gss_cred_id_t_desc_struct', 'gss_ctx_id_t_desc_struct', 'gss_iov_buffer_desc_struct', 'gss_krb5_cfx_keydata', 'gss_krb5_lucid_context_v1', 'gss_krb5_lucid_context_version', 'gss_krb5_lucid_key', 'gss_krb5_rfc1964_keydata', 'gss_name_t_desc_struct', 'opaqueCMBufferQueueTriggerToken', 'sfntCMapEncoding', 'sfntCMapExtendedSubHeader', 'sfntCMapHeader', 'sfntCMapSubHeader', 'sfntDescriptorHeader', 'sfntDirectory', 'sfntDirectoryEntry', 'sfntFeatureHeader', 'sfntFeatureName', 'sfntFontDescriptor', 'sfntFontFeatureSetting', 'sfntFontRunFeature', 'sfntInstance', 'sfntNameHeader', 'sfntNameRecord', 'sfntVariationAxis', 'sfntVariationHeader'}
+COCOA_INTERFACES = {'AAAttribution', 'ABNewPersonViewController', 'ABPeoplePickerNavigationController', 'ABPersonViewController', 'ABUnknownPersonViewController', 'ACAccount', 'ACAccountCredential', 'ACAccountStore', 'ACAccountType', 'ADBannerView', 'ADClient', 'ADInterstitialAd', 'ADInterstitialAdPresentationViewController', 'AEAssessmentConfiguration', 'AEAssessmentSession', 'ALAsset', 'ALAssetRepresentation', 'ALAssetsFilter', 'ALAssetsGroup', 'ALAssetsLibrary', 'APActivationPayload', 'ARAnchor', 'ARAppClipCodeAnchor', 'ARBody2D', 'ARBodyAnchor', 'ARBodyTrackingConfiguration', 'ARCamera', 'ARCoachingOverlayView', 'ARCollaborationData', 'ARConfiguration', 'ARDepthData', 'ARDirectionalLightEstimate', 'AREnvironmentProbeAnchor', 'ARFaceAnchor', 'ARFaceGeometry', 'ARFaceTrackingConfiguration', 'ARFrame', 'ARGeoAnchor', 'ARGeoTrackingConfiguration', 'ARGeoTrackingStatus', 'ARGeometryElement', 'ARGeometrySource', 'ARHitTestResult', 'ARImageAnchor', 'ARImageTrackingConfiguration', 'ARLightEstimate', 'ARMatteGenerator', 'ARMeshAnchor', 'ARMeshGeometry', 'ARObjectAnchor', 'ARObjectScanningConfiguration', 'AROrientationTrackingConfiguration', 'ARParticipantAnchor', 'ARPlaneAnchor', 'ARPlaneGeometry', 'ARPointCloud', 'ARPositionalTrackingConfiguration', 'ARQuickLookPreviewItem', 'ARRaycastQuery', 'ARRaycastResult', 'ARReferenceImage', 'ARReferenceObject', 'ARSCNFaceGeometry', 'ARSCNPlaneGeometry', 'ARSCNView', 'ARSKView', 'ARSession', 'ARSkeleton', 'ARSkeleton2D', 'ARSkeleton3D', 'ARSkeletonDefinition', 'ARTrackedRaycast', 'ARVideoFormat', 'ARView', 'ARWorldMap', 'ARWorldTrackingConfiguration', 'ASAccountAuthenticationModificationController', 'ASAccountAuthenticationModificationExtensionContext', 'ASAccountAuthenticationModificationReplacePasswordWithSignInWithAppleRequest', 'ASAccountAuthenticationModificationRequest', 'ASAccountAuthenticationModificationUpgradePasswordToStrongPasswordRequest', 'ASAccountAuthenticationModificationViewController', 'ASAuthorization', 'ASAuthorizationAppleIDButton', 'ASAuthorizationAppleIDCredential', 'ASAuthorizationAppleIDProvider', 'ASAuthorizationAppleIDRequest', 'ASAuthorizationController', 'ASAuthorizationOpenIDRequest', 'ASAuthorizationPasswordProvider', 'ASAuthorizationPasswordRequest', 'ASAuthorizationProviderExtensionAuthorizationRequest', 'ASAuthorizationRequest', 'ASAuthorizationSingleSignOnCredential', 'ASAuthorizationSingleSignOnProvider', 'ASAuthorizationSingleSignOnRequest', 'ASCredentialIdentityStore', 'ASCredentialIdentityStoreState', 'ASCredentialProviderExtensionContext', 'ASCredentialProviderViewController', 'ASCredentialServiceIdentifier', 'ASIdentifierManager', 'ASPasswordCredential', 'ASPasswordCredentialIdentity', 'ASWebAuthenticationSession', 'ASWebAuthenticationSessionRequest', 'ASWebAuthenticationSessionWebBrowserSessionManager', 'ATTrackingManager', 'AUAudioUnit', 'AUAudioUnitBus', 'AUAudioUnitBusArray', 'AUAudioUnitPreset', 'AUAudioUnitV2Bridge', 'AUAudioUnitViewConfiguration', 'AUParameter', 'AUParameterGroup', 'AUParameterNode', 'AUParameterTree', 'AUViewController', 'AVAggregateAssetDownloadTask', 'AVAsset', 'AVAssetCache', 'AVAssetDownloadStorageManagementPolicy', 'AVAssetDownloadStorageManager', 'AVAssetDownloadTask', 'AVAssetDownloadURLSession', 'AVAssetExportSession', 'AVAssetImageGenerator', 'AVAssetReader', 'AVAssetReaderAudioMixOutput', 'AVAssetReaderOutput', 'AVAssetReaderOutputMetadataAdaptor', 'AVAssetReaderSampleReferenceOutput', 'AVAssetReaderTrackOutput', 'AVAssetReaderVideoCompositionOutput', 'AVAssetResourceLoader', 
'AVAssetResourceLoadingContentInformationRequest', 'AVAssetResourceLoadingDataRequest', 'AVAssetResourceLoadingRequest', 'AVAssetResourceLoadingRequestor', 'AVAssetResourceRenewalRequest', 'AVAssetSegmentReport', 'AVAssetSegmentReportSampleInformation', 'AVAssetSegmentTrackReport', 'AVAssetTrack', 'AVAssetTrackGroup', 'AVAssetTrackSegment', 'AVAssetWriter', 'AVAssetWriterInput', 'AVAssetWriterInputGroup', 'AVAssetWriterInputMetadataAdaptor', 'AVAssetWriterInputPassDescription', 'AVAssetWriterInputPixelBufferAdaptor', 'AVAsynchronousCIImageFilteringRequest', 'AVAsynchronousVideoCompositionRequest', 'AVAudioMix', 'AVAudioMixInputParameters', 'AVAudioSession', 'AVCameraCalibrationData', 'AVCaptureAudioChannel', 'AVCaptureAudioDataOutput', 'AVCaptureAudioFileOutput', 'AVCaptureAudioPreviewOutput', 'AVCaptureAutoExposureBracketedStillImageSettings', 'AVCaptureBracketedStillImageSettings', 'AVCaptureConnection', 'AVCaptureDataOutputSynchronizer', 'AVCaptureDepthDataOutput', 'AVCaptureDevice', 'AVCaptureDeviceDiscoverySession', 'AVCaptureDeviceFormat', 'AVCaptureDeviceInput', 'AVCaptureDeviceInputSource', 'AVCaptureFileOutput', 'AVCaptureInput', 'AVCaptureInputPort', 'AVCaptureManualExposureBracketedStillImageSettings', 'AVCaptureMetadataInput', 'AVCaptureMetadataOutput', 'AVCaptureMovieFileOutput', 'AVCaptureMultiCamSession', 'AVCaptureOutput', 'AVCapturePhoto', 'AVCapturePhotoBracketSettings', 'AVCapturePhotoOutput', 'AVCapturePhotoSettings', 'AVCaptureResolvedPhotoSettings', 'AVCaptureScreenInput', 'AVCaptureSession', 'AVCaptureStillImageOutput', 'AVCaptureSynchronizedData', 'AVCaptureSynchronizedDataCollection', 'AVCaptureSynchronizedDepthData', 'AVCaptureSynchronizedMetadataObjectData', 'AVCaptureSynchronizedSampleBufferData', 'AVCaptureSystemPressureState', 'AVCaptureVideoDataOutput', 'AVCaptureVideoPreviewLayer', 'AVComposition', 'AVCompositionTrack', 'AVCompositionTrackFormatDescriptionReplacement', 'AVCompositionTrackSegment', 'AVContentKeyRequest', 'AVContentKeyResponse', 'AVContentKeySession', 'AVDateRangeMetadataGroup', 'AVDepthData', 'AVDisplayCriteria', 'AVFragmentedAsset', 'AVFragmentedAssetMinder', 'AVFragmentedAssetTrack', 'AVFragmentedMovie', 'AVFragmentedMovieMinder', 'AVFragmentedMovieTrack', 'AVFrameRateRange', 'AVMediaDataStorage', 'AVMediaSelection', 'AVMediaSelectionGroup', 'AVMediaSelectionOption', 'AVMetadataBodyObject', 'AVMetadataCatBodyObject', 'AVMetadataDogBodyObject', 'AVMetadataFaceObject', 'AVMetadataGroup', 'AVMetadataHumanBodyObject', 'AVMetadataItem', 'AVMetadataItemFilter', 'AVMetadataItemValueRequest', 'AVMetadataMachineReadableCodeObject', 'AVMetadataObject', 'AVMetadataSalientObject', 'AVMovie', 'AVMovieTrack', 'AVMutableAssetDownloadStorageManagementPolicy', 'AVMutableAudioMix', 'AVMutableAudioMixInputParameters', 'AVMutableComposition', 'AVMutableCompositionTrack', 'AVMutableDateRangeMetadataGroup', 'AVMutableMediaSelection', 'AVMutableMetadataItem', 'AVMutableMovie', 'AVMutableMovieTrack', 'AVMutableTimedMetadataGroup', 'AVMutableVideoComposition', 'AVMutableVideoCompositionInstruction', 'AVMutableVideoCompositionLayerInstruction', 'AVOutputSettingsAssistant', 'AVPersistableContentKeyRequest', 'AVPictureInPictureController', 'AVPlayer', 'AVPlayerItem', 'AVPlayerItemAccessLog', 'AVPlayerItemAccessLogEvent', 'AVPlayerItemErrorLog', 'AVPlayerItemErrorLogEvent', 'AVPlayerItemLegibleOutput', 'AVPlayerItemMediaDataCollector', 'AVPlayerItemMetadataCollector', 'AVPlayerItemMetadataOutput', 'AVPlayerItemOutput', 'AVPlayerItemTrack', 'AVPlayerItemVideoOutput', 
'AVPlayerLayer', 'AVPlayerLooper', 'AVPlayerMediaSelectionCriteria', 'AVPlayerViewController', 'AVPortraitEffectsMatte', 'AVQueuePlayer', 'AVRouteDetector', 'AVRoutePickerView', 'AVSampleBufferAudioRenderer', 'AVSampleBufferDisplayLayer', 'AVSampleBufferRenderSynchronizer', 'AVSemanticSegmentationMatte', 'AVSynchronizedLayer', 'AVTextStyleRule', 'AVTimedMetadataGroup', 'AVURLAsset', 'AVVideoComposition', 'AVVideoCompositionCoreAnimationTool', 'AVVideoCompositionInstruction', 'AVVideoCompositionLayerInstruction', 'AVVideoCompositionRenderContext', 'AVVideoCompositionRenderHint', 'AXCustomContent', 'BCChatAction', 'BCChatButton', 'BGAppRefreshTask', 'BGAppRefreshTaskRequest', 'BGProcessingTask', 'BGProcessingTaskRequest', 'BGTask', 'BGTaskRequest', 'BGTaskScheduler', 'CAAnimation', 'CAAnimationGroup', 'CABTMIDICentralViewController', 'CABTMIDILocalPeripheralViewController', 'CABasicAnimation', 'CADisplayLink', 'CAEAGLLayer', 'CAEmitterCell', 'CAEmitterLayer', 'CAGradientLayer', 'CAInterAppAudioSwitcherView', 'CAInterAppAudioTransportView', 'CAKeyframeAnimation', 'CALayer', 'CAMediaTimingFunction', 'CAMetalLayer', 'CAPropertyAnimation', 'CAReplicatorLayer', 'CAScrollLayer', 'CAShapeLayer', 'CASpringAnimation', 'CATextLayer', 'CATiledLayer', 'CATransaction', 'CATransformLayer', 'CATransition', 'CAValueFunction', 'CBATTRequest', 'CBAttribute', 'CBCentral', 'CBCentralManager', 'CBCharacteristic', 'CBDescriptor', 'CBL2CAPChannel', 'CBManager', 'CBMutableCharacteristic', 'CBMutableDescriptor', 'CBMutableService', 'CBPeer', 'CBPeripheral', 'CBPeripheralManager', 'CBService', 'CBUUID', 'CHHapticDynamicParameter', 'CHHapticEngine', 'CHHapticEvent', 'CHHapticEventParameter', 'CHHapticParameterCurve', 'CHHapticParameterCurveControlPoint', 'CHHapticPattern', 'CIAztecCodeDescriptor', 'CIBarcodeDescriptor', 'CIBlendKernel', 'CIColor', 'CIColorKernel', 'CIContext', 'CIDataMatrixCodeDescriptor', 'CIDetector', 'CIFaceFeature', 'CIFeature', 'CIFilter', 'CIFilterGenerator', 'CIFilterShape', 'CIImage', 'CIImageAccumulator', 'CIImageProcessorKernel', 'CIKernel', 'CIPDF417CodeDescriptor', 'CIPlugIn', 'CIQRCodeDescriptor', 'CIQRCodeFeature', 'CIRectangleFeature', 'CIRenderDestination', 'CIRenderInfo', 'CIRenderTask', 'CISampler', 'CITextFeature', 'CIVector', 'CIWarpKernel', 'CKAcceptSharesOperation', 'CKAsset', 'CKContainer', 'CKDatabase', 'CKDatabaseNotification', 'CKDatabaseOperation', 'CKDatabaseSubscription', 'CKDiscoverAllUserIdentitiesOperation', 'CKDiscoverUserIdentitiesOperation', 'CKFetchDatabaseChangesOperation', 'CKFetchNotificationChangesOperation', 'CKFetchRecordChangesOperation', 'CKFetchRecordZoneChangesConfiguration', 'CKFetchRecordZoneChangesOperation', 'CKFetchRecordZoneChangesOptions', 'CKFetchRecordZonesOperation', 'CKFetchRecordsOperation', 'CKFetchShareMetadataOperation', 'CKFetchShareParticipantsOperation', 'CKFetchSubscriptionsOperation', 'CKFetchWebAuthTokenOperation', 'CKLocationSortDescriptor', 'CKMarkNotificationsReadOperation', 'CKModifyBadgeOperation', 'CKModifyRecordZonesOperation', 'CKModifyRecordsOperation', 'CKModifySubscriptionsOperation', 'CKNotification', 'CKNotificationID', 'CKNotificationInfo', 'CKOperation', 'CKOperationConfiguration', 'CKOperationGroup', 'CKQuery', 'CKQueryCursor', 'CKQueryNotification', 'CKQueryOperation', 'CKQuerySubscription', 'CKRecord', 'CKRecordID', 'CKRecordZone', 'CKRecordZoneID', 'CKRecordZoneNotification', 'CKRecordZoneSubscription', 'CKReference', 'CKServerChangeToken', 'CKShare', 'CKShareMetadata', 'CKShareParticipant', 'CKSubscription', 
'CKUserIdentity', 'CKUserIdentityLookupInfo', 'CLBeacon', 'CLBeaconIdentityConstraint', 'CLBeaconRegion', 'CLCircularRegion', 'CLFloor', 'CLGeocoder', 'CLHeading', 'CLKComplication', 'CLKComplicationDescriptor', 'CLKComplicationServer', 'CLKComplicationTemplate', 'CLKComplicationTemplateCircularSmallRingImage', 'CLKComplicationTemplateCircularSmallRingText', 'CLKComplicationTemplateCircularSmallSimpleImage', 'CLKComplicationTemplateCircularSmallSimpleText', 'CLKComplicationTemplateCircularSmallStackImage', 'CLKComplicationTemplateCircularSmallStackText', 'CLKComplicationTemplateExtraLargeColumnsText', 'CLKComplicationTemplateExtraLargeRingImage', 'CLKComplicationTemplateExtraLargeRingText', 'CLKComplicationTemplateExtraLargeSimpleImage', 'CLKComplicationTemplateExtraLargeSimpleText', 'CLKComplicationTemplateExtraLargeStackImage', 'CLKComplicationTemplateExtraLargeStackText', 'CLKComplicationTemplateGraphicBezelCircularText', 'CLKComplicationTemplateGraphicCircular', 'CLKComplicationTemplateGraphicCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicCircularClosedGaugeText', 'CLKComplicationTemplateGraphicCircularImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicCircularStackImage', 'CLKComplicationTemplateGraphicCircularStackText', 'CLKComplicationTemplateGraphicCornerCircularImage', 'CLKComplicationTemplateGraphicCornerGaugeImage', 'CLKComplicationTemplateGraphicCornerGaugeText', 'CLKComplicationTemplateGraphicCornerStackText', 'CLKComplicationTemplateGraphicCornerTextImage', 'CLKComplicationTemplateGraphicExtraLargeCircular', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeText', 'CLKComplicationTemplateGraphicExtraLargeCircularImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicExtraLargeCircularStackImage', 'CLKComplicationTemplateGraphicExtraLargeCircularStackText', 'CLKComplicationTemplateGraphicRectangularFullImage', 'CLKComplicationTemplateGraphicRectangularLargeImage', 'CLKComplicationTemplateGraphicRectangularStandardBody', 'CLKComplicationTemplateGraphicRectangularTextGauge', 'CLKComplicationTemplateModularLargeColumns', 'CLKComplicationTemplateModularLargeStandardBody', 'CLKComplicationTemplateModularLargeTable', 'CLKComplicationTemplateModularLargeTallBody', 'CLKComplicationTemplateModularSmallColumnsText', 'CLKComplicationTemplateModularSmallRingImage', 'CLKComplicationTemplateModularSmallRingText', 'CLKComplicationTemplateModularSmallSimpleImage', 'CLKComplicationTemplateModularSmallSimpleText', 'CLKComplicationTemplateModularSmallStackImage', 'CLKComplicationTemplateModularSmallStackText', 'CLKComplicationTemplateUtilitarianLargeFlat', 'CLKComplicationTemplateUtilitarianSmallFlat', 'CLKComplicationTemplateUtilitarianSmallRingImage', 'CLKComplicationTemplateUtilitarianSmallRingText', 'CLKComplicationTemplateUtilitarianSmallSquare', 'CLKComplicationTimelineEntry', 'CLKDateTextProvider', 'CLKFullColorImageProvider', 'CLKGaugeProvider', 'CLKImageProvider', 'CLKRelativeDateTextProvider', 'CLKSimpleGaugeProvider', 'CLKSimpleTextProvider', 'CLKTextProvider', 'CLKTimeIntervalGaugeProvider', 'CLKTimeIntervalTextProvider', 'CLKTimeTextProvider', 
'CLKWatchFaceLibrary', 'CLLocation', 'CLLocationManager', 'CLPlacemark', 'CLRegion', 'CLSActivity', 'CLSActivityItem', 'CLSBinaryItem', 'CLSContext', 'CLSDataStore', 'CLSObject', 'CLSProgressReportingCapability', 'CLSQuantityItem', 'CLSScoreItem', 'CLVisit', 'CMAccelerometerData', 'CMAltimeter', 'CMAltitudeData', 'CMAttitude', 'CMDeviceMotion', 'CMDyskineticSymptomResult', 'CMFallDetectionEvent', 'CMFallDetectionManager', 'CMGyroData', 'CMHeadphoneMotionManager', 'CMLogItem', 'CMMagnetometerData', 'CMMotionActivity', 'CMMotionActivityManager', 'CMMotionManager', 'CMMovementDisorderManager', 'CMPedometer', 'CMPedometerData', 'CMPedometerEvent', 'CMRecordedAccelerometerData', 'CMRecordedRotationRateData', 'CMRotationRateData', 'CMSensorDataList', 'CMSensorRecorder', 'CMStepCounter', 'CMTremorResult', 'CNChangeHistoryAddContactEvent', 'CNChangeHistoryAddGroupEvent', 'CNChangeHistoryAddMemberToGroupEvent', 'CNChangeHistoryAddSubgroupToGroupEvent', 'CNChangeHistoryDeleteContactEvent', 'CNChangeHistoryDeleteGroupEvent', 'CNChangeHistoryDropEverythingEvent', 'CNChangeHistoryEvent', 'CNChangeHistoryFetchRequest', 'CNChangeHistoryRemoveMemberFromGroupEvent', 'CNChangeHistoryRemoveSubgroupFromGroupEvent', 'CNChangeHistoryUpdateContactEvent', 'CNChangeHistoryUpdateGroupEvent', 'CNContact', 'CNContactFetchRequest', 'CNContactFormatter', 'CNContactPickerViewController', 'CNContactProperty', 'CNContactRelation', 'CNContactStore', 'CNContactVCardSerialization', 'CNContactViewController', 'CNContactsUserDefaults', 'CNContainer', 'CNFetchRequest', 'CNFetchResult', 'CNGroup', 'CNInstantMessageAddress', 'CNLabeledValue', 'CNMutableContact', 'CNMutableGroup', 'CNMutablePostalAddress', 'CNPhoneNumber', 'CNPostalAddress', 'CNPostalAddressFormatter', 'CNSaveRequest', 'CNSocialProfile', 'CPActionSheetTemplate', 'CPAlertAction', 'CPAlertTemplate', 'CPBarButton', 'CPButton', 'CPContact', 'CPContactCallButton', 'CPContactDirectionsButton', 'CPContactMessageButton', 'CPContactTemplate', 'CPDashboardButton', 'CPDashboardController', 'CPGridButton', 'CPGridTemplate', 'CPImageSet', 'CPInformationItem', 'CPInformationRatingItem', 'CPInformationTemplate', 'CPInterfaceController', 'CPListImageRowItem', 'CPListItem', 'CPListSection', 'CPListTemplate', 'CPManeuver', 'CPMapButton', 'CPMapTemplate', 'CPMessageComposeBarButton', 'CPMessageListItem', 'CPMessageListItemLeadingConfiguration', 'CPMessageListItemTrailingConfiguration', 'CPNavigationAlert', 'CPNavigationSession', 'CPNowPlayingAddToLibraryButton', 'CPNowPlayingButton', 'CPNowPlayingImageButton', 'CPNowPlayingMoreButton', 'CPNowPlayingPlaybackRateButton', 'CPNowPlayingRepeatButton', 'CPNowPlayingShuffleButton', 'CPNowPlayingTemplate', 'CPPointOfInterest', 'CPPointOfInterestTemplate', 'CPRouteChoice', 'CPSearchTemplate', 'CPSessionConfiguration', 'CPTabBarTemplate', 'CPTemplate', 'CPTemplateApplicationDashboardScene', 'CPTemplateApplicationScene', 'CPTextButton', 'CPTravelEstimates', 'CPTrip', 'CPTripPreviewTextConfiguration', 'CPVoiceControlState', 'CPVoiceControlTemplate', 'CPWindow', 'CSCustomAttributeKey', 'CSIndexExtensionRequestHandler', 'CSLocalizedString', 'CSPerson', 'CSSearchQuery', 'CSSearchableIndex', 'CSSearchableItem', 'CSSearchableItemAttributeSet', 'CTCall', 'CTCallCenter', 'CTCarrier', 'CTCellularData', 'CTCellularPlanProvisioning', 'CTCellularPlanProvisioningRequest', 'CTSubscriber', 'CTSubscriberInfo', 'CTTelephonyNetworkInfo', 'CXAction', 'CXAnswerCallAction', 'CXCall', 'CXCallAction', 'CXCallController', 'CXCallDirectoryExtensionContext', 
'CXCallDirectoryManager', 'CXCallDirectoryProvider', 'CXCallObserver', 'CXCallUpdate', 'CXEndCallAction', 'CXHandle', 'CXPlayDTMFCallAction', 'CXProvider', 'CXProviderConfiguration', 'CXSetGroupCallAction', 'CXSetHeldCallAction', 'CXSetMutedCallAction', 'CXStartCallAction', 'CXTransaction', 'DCAppAttestService', 'DCDevice', 'EAAccessory', 'EAAccessoryManager', 'EAGLContext', 'EAGLSharegroup', 'EASession', 'EAWiFiUnconfiguredAccessory', 'EAWiFiUnconfiguredAccessoryBrowser', 'EKAlarm', 'EKCalendar', 'EKCalendarChooser', 'EKCalendarItem', 'EKEvent', 'EKEventEditViewController', 'EKEventStore', 'EKEventViewController', 'EKObject', 'EKParticipant', 'EKRecurrenceDayOfWeek', 'EKRecurrenceEnd', 'EKRecurrenceRule', 'EKReminder', 'EKSource', 'EKStructuredLocation', 'ENExposureConfiguration', 'ENExposureDaySummary', 'ENExposureDetectionSummary', 'ENExposureInfo', 'ENExposureSummaryItem', 'ENExposureWindow', 'ENManager', 'ENScanInstance', 'ENTemporaryExposureKey', 'EntityRotationGestureRecognizer', 'EntityScaleGestureRecognizer', 'EntityTranslationGestureRecognizer', 'FPUIActionExtensionContext', 'FPUIActionExtensionViewController', 'GCColor', 'GCController', 'GCControllerAxisInput', 'GCControllerButtonInput', 'GCControllerDirectionPad', 'GCControllerElement', 'GCControllerTouchpad', 'GCDeviceBattery', 'GCDeviceCursor', 'GCDeviceHaptics', 'GCDeviceLight', 'GCDirectionalGamepad', 'GCDualShockGamepad', 'GCEventViewController', 'GCExtendedGamepad', 'GCExtendedGamepadSnapshot', 'GCGamepad', 'GCGamepadSnapshot', 'GCKeyboard', 'GCKeyboardInput', 'GCMicroGamepad', 'GCMicroGamepadSnapshot', 'GCMotion', 'GCMouse', 'GCMouseInput', 'GCPhysicalInputProfile', 'GCXboxGamepad', 'GKARC4RandomSource', 'GKAccessPoint', 'GKAchievement', 'GKAchievementChallenge', 'GKAchievementDescription', 'GKAchievementViewController', 'GKAgent', 'GKAgent2D', 'GKAgent3D', 'GKBasePlayer', 'GKBehavior', 'GKBillowNoiseSource', 'GKChallenge', 'GKChallengeEventHandler', 'GKCheckerboardNoiseSource', 'GKCircleObstacle', 'GKCloudPlayer', 'GKCoherentNoiseSource', 'GKComponent', 'GKComponentSystem', 'GKCompositeBehavior', 'GKConstantNoiseSource', 'GKCylindersNoiseSource', 'GKDecisionNode', 'GKDecisionTree', 'GKEntity', 'GKFriendRequestComposeViewController', 'GKGameCenterViewController', 'GKGameSession', 'GKGameSessionSharingViewController', 'GKGaussianDistribution', 'GKGoal', 'GKGraph', 'GKGraphNode', 'GKGraphNode2D', 'GKGraphNode3D', 'GKGridGraph', 'GKGridGraphNode', 'GKInvite', 'GKLeaderboard', 'GKLeaderboardEntry', 'GKLeaderboardScore', 'GKLeaderboardSet', 'GKLeaderboardViewController', 'GKLinearCongruentialRandomSource', 'GKLocalPlayer', 'GKMatch', 'GKMatchRequest', 'GKMatchmaker', 'GKMatchmakerViewController', 'GKMersenneTwisterRandomSource', 'GKMeshGraph', 'GKMinmaxStrategist', 'GKMonteCarloStrategist', 'GKNSPredicateRule', 'GKNoise', 'GKNoiseMap', 'GKNoiseSource', 'GKNotificationBanner', 'GKObstacle', 'GKObstacleGraph', 'GKOctree', 'GKOctreeNode', 'GKPath', 'GKPeerPickerController', 'GKPerlinNoiseSource', 'GKPlayer', 'GKPolygonObstacle', 'GKQuadtree', 'GKQuadtreeNode', 'GKRTree', 'GKRandomDistribution', 'GKRandomSource', 'GKRidgedNoiseSource', 'GKRule', 'GKRuleSystem', 'GKSCNNodeComponent', 'GKSKNodeComponent', 'GKSavedGame', 'GKScene', 'GKScore', 'GKScoreChallenge', 'GKSession', 'GKShuffledDistribution', 'GKSphereObstacle', 'GKSpheresNoiseSource', 'GKState', 'GKStateMachine', 'GKTurnBasedEventHandler', 'GKTurnBasedExchangeReply', 'GKTurnBasedMatch', 'GKTurnBasedMatchmakerViewController', 'GKTurnBasedParticipant', 'GKVoiceChat', 
'GKVoiceChatService', 'GKVoronoiNoiseSource', 'GLKBaseEffect', 'GLKEffectProperty', 'GLKEffectPropertyFog', 'GLKEffectPropertyLight', 'GLKEffectPropertyMaterial', 'GLKEffectPropertyTexture', 'GLKEffectPropertyTransform', 'GLKMesh', 'GLKMeshBuffer', 'GLKMeshBufferAllocator', 'GLKReflectionMapEffect', 'GLKSkyboxEffect', 'GLKSubmesh', 'GLKTextureInfo', 'GLKTextureLoader', 'GLKView', 'GLKViewController', 'HKActivityMoveModeObject', 'HKActivityRingView', 'HKActivitySummary', 'HKActivitySummaryQuery', 'HKActivitySummaryType', 'HKAnchoredObjectQuery', 'HKAudiogramSample', 'HKAudiogramSampleType', 'HKAudiogramSensitivityPoint', 'HKBiologicalSexObject', 'HKBloodTypeObject', 'HKCDADocument', 'HKCDADocumentSample', 'HKCategorySample', 'HKCategoryType', 'HKCharacteristicType', 'HKClinicalRecord', 'HKClinicalType', 'HKCorrelation', 'HKCorrelationQuery', 'HKCorrelationType', 'HKCumulativeQuantitySample', 'HKCumulativeQuantitySeriesSample', 'HKDeletedObject', 'HKDevice', 'HKDiscreteQuantitySample', 'HKDocumentQuery', 'HKDocumentSample', 'HKDocumentType', 'HKElectrocardiogram', 'HKElectrocardiogramQuery', 'HKElectrocardiogramType', 'HKElectrocardiogramVoltageMeasurement', 'HKFHIRResource', 'HKFHIRVersion', 'HKFitzpatrickSkinTypeObject', 'HKHealthStore', 'HKHeartbeatSeriesBuilder', 'HKHeartbeatSeriesQuery', 'HKHeartbeatSeriesSample', 'HKLiveWorkoutBuilder', 'HKLiveWorkoutDataSource', 'HKObject', 'HKObjectType', 'HKObserverQuery', 'HKQuantity', 'HKQuantitySample', 'HKQuantitySeriesSampleBuilder', 'HKQuantitySeriesSampleQuery', 'HKQuantityType', 'HKQuery', 'HKQueryAnchor', 'HKSample', 'HKSampleQuery', 'HKSampleType', 'HKSeriesBuilder', 'HKSeriesSample', 'HKSeriesType', 'HKSource', 'HKSourceQuery', 'HKSourceRevision', 'HKStatistics', 'HKStatisticsCollection', 'HKStatisticsCollectionQuery', 'HKStatisticsQuery', 'HKUnit', 'HKWheelchairUseObject', 'HKWorkout', 'HKWorkoutBuilder', 'HKWorkoutConfiguration', 'HKWorkoutEvent', 'HKWorkoutRoute', 'HKWorkoutRouteBuilder', 'HKWorkoutRouteQuery', 'HKWorkoutSession', 'HKWorkoutType', 'HMAccessControl', 'HMAccessory', 'HMAccessoryBrowser', 'HMAccessoryCategory', 'HMAccessoryOwnershipToken', 'HMAccessoryProfile', 'HMAccessorySetupPayload', 'HMAction', 'HMActionSet', 'HMAddAccessoryRequest', 'HMCalendarEvent', 'HMCameraAudioControl', 'HMCameraControl', 'HMCameraProfile', 'HMCameraSettingsControl', 'HMCameraSnapshot', 'HMCameraSnapshotControl', 'HMCameraSource', 'HMCameraStream', 'HMCameraStreamControl', 'HMCameraView', 'HMCharacteristic', 'HMCharacteristicEvent', 'HMCharacteristicMetadata', 'HMCharacteristicThresholdRangeEvent', 'HMCharacteristicWriteAction', 'HMDurationEvent', 'HMEvent', 'HMEventTrigger', 'HMHome', 'HMHomeAccessControl', 'HMHomeManager', 'HMLocationEvent', 'HMMutableCalendarEvent', 'HMMutableCharacteristicEvent', 'HMMutableCharacteristicThresholdRangeEvent', 'HMMutableDurationEvent', 'HMMutableLocationEvent', 'HMMutablePresenceEvent', 'HMMutableSignificantTimeEvent', 'HMNetworkConfigurationProfile', 'HMNumberRange', 'HMPresenceEvent', 'HMRoom', 'HMService', 'HMServiceGroup', 'HMSignificantTimeEvent', 'HMTimeEvent', 'HMTimerTrigger', 'HMTrigger', 'HMUser', 'HMZone', 'ICCameraDevice', 'ICCameraFile', 'ICCameraFolder', 'ICCameraItem', 'ICDevice', 'ICDeviceBrowser', 'ICScannerBandData', 'ICScannerDevice', 'ICScannerFeature', 'ICScannerFeatureBoolean', 'ICScannerFeatureEnumeration', 'ICScannerFeatureRange', 'ICScannerFeatureTemplate', 'ICScannerFunctionalUnit', 'ICScannerFunctionalUnitDocumentFeeder', 'ICScannerFunctionalUnitFlatbed', 
'ICScannerFunctionalUnitNegativeTransparency', 'ICScannerFunctionalUnitPositiveTransparency', 'ILCallClassificationRequest', 'ILCallCommunication', 'ILClassificationRequest', 'ILClassificationResponse', 'ILClassificationUIExtensionContext', 'ILClassificationUIExtensionViewController', 'ILCommunication', 'ILMessageClassificationRequest', 'ILMessageCommunication', 'ILMessageFilterExtension', 'ILMessageFilterExtensionContext', 'ILMessageFilterQueryRequest', 'ILMessageFilterQueryResponse', 'ILNetworkResponse', 'INAccountTypeResolutionResult', 'INActivateCarSignalIntent', 'INActivateCarSignalIntentResponse', 'INAddMediaIntent', 'INAddMediaIntentResponse', 'INAddMediaMediaDestinationResolutionResult', 'INAddMediaMediaItemResolutionResult', 'INAddTasksIntent', 'INAddTasksIntentResponse', 'INAddTasksTargetTaskListResolutionResult', 'INAddTasksTemporalEventTriggerResolutionResult', 'INAirline', 'INAirport', 'INAirportGate', 'INAppendToNoteIntent', 'INAppendToNoteIntentResponse', 'INBalanceAmount', 'INBalanceTypeResolutionResult', 'INBillDetails', 'INBillPayee', 'INBillPayeeResolutionResult', 'INBillTypeResolutionResult', 'INBoatReservation', 'INBoatTrip', 'INBookRestaurantReservationIntent', 'INBookRestaurantReservationIntentResponse', 'INBooleanResolutionResult', 'INBusReservation', 'INBusTrip', 'INCallCapabilityResolutionResult', 'INCallDestinationTypeResolutionResult', 'INCallRecord', 'INCallRecordFilter', 'INCallRecordResolutionResult', 'INCallRecordTypeOptionsResolutionResult', 'INCallRecordTypeResolutionResult', 'INCancelRideIntent', 'INCancelRideIntentResponse', 'INCancelWorkoutIntent', 'INCancelWorkoutIntentResponse', 'INCar', 'INCarAirCirculationModeResolutionResult', 'INCarAudioSourceResolutionResult', 'INCarDefrosterResolutionResult', 'INCarHeadUnit', 'INCarSeatResolutionResult', 'INCarSignalOptionsResolutionResult', 'INCreateNoteIntent', 'INCreateNoteIntentResponse', 'INCreateTaskListIntent', 'INCreateTaskListIntentResponse', 'INCurrencyAmount', 'INCurrencyAmountResolutionResult', 'INDailyRoutineRelevanceProvider', 'INDateComponentsRange', 'INDateComponentsRangeResolutionResult', 'INDateComponentsResolutionResult', 'INDateRelevanceProvider', 'INDateSearchTypeResolutionResult', 'INDefaultCardTemplate', 'INDeleteTasksIntent', 'INDeleteTasksIntentResponse', 'INDeleteTasksTaskListResolutionResult', 'INDeleteTasksTaskResolutionResult', 'INDoubleResolutionResult', 'INEndWorkoutIntent', 'INEndWorkoutIntentResponse', 'INEnergyResolutionResult', 'INEnumResolutionResult', 'INExtension', 'INFile', 'INFileResolutionResult', 'INFlight', 'INFlightReservation', 'INGetAvailableRestaurantReservationBookingDefaultsIntent', 'INGetAvailableRestaurantReservationBookingDefaultsIntentResponse', 'INGetAvailableRestaurantReservationBookingsIntent', 'INGetAvailableRestaurantReservationBookingsIntentResponse', 'INGetCarLockStatusIntent', 'INGetCarLockStatusIntentResponse', 'INGetCarPowerLevelStatusIntent', 'INGetCarPowerLevelStatusIntentResponse', 'INGetReservationDetailsIntent', 'INGetReservationDetailsIntentResponse', 'INGetRestaurantGuestIntent', 'INGetRestaurantGuestIntentResponse', 'INGetRideStatusIntent', 'INGetRideStatusIntentResponse', 'INGetUserCurrentRestaurantReservationBookingsIntent', 'INGetUserCurrentRestaurantReservationBookingsIntentResponse', 'INGetVisualCodeIntent', 'INGetVisualCodeIntentResponse', 'INImage', 'INImageNoteContent', 'INIntegerResolutionResult', 'INIntent', 'INIntentResolutionResult', 'INIntentResponse', 'INInteraction', 'INLengthResolutionResult', 'INListCarsIntent', 
'INListCarsIntentResponse', 'INListRideOptionsIntent', 'INListRideOptionsIntentResponse', 'INLocationRelevanceProvider', 'INLocationSearchTypeResolutionResult', 'INLodgingReservation', 'INMassResolutionResult', 'INMediaAffinityTypeResolutionResult', 'INMediaDestination', 'INMediaDestinationResolutionResult', 'INMediaItem', 'INMediaItemResolutionResult', 'INMediaSearch', 'INMediaUserContext', 'INMessage', 'INMessageAttributeOptionsResolutionResult', 'INMessageAttributeResolutionResult', 'INNote', 'INNoteContent', 'INNoteContentResolutionResult', 'INNoteContentTypeResolutionResult', 'INNoteResolutionResult', 'INNotebookItemTypeResolutionResult', 'INObject', 'INObjectCollection', 'INObjectResolutionResult', 'INObjectSection', 'INOutgoingMessageTypeResolutionResult', 'INParameter', 'INPauseWorkoutIntent', 'INPauseWorkoutIntentResponse', 'INPayBillIntent', 'INPayBillIntentResponse', 'INPaymentAccount', 'INPaymentAccountResolutionResult', 'INPaymentAmount', 'INPaymentAmountResolutionResult', 'INPaymentMethod', 'INPaymentMethodResolutionResult', 'INPaymentRecord', 'INPaymentStatusResolutionResult', 'INPerson', 'INPersonHandle', 'INPersonResolutionResult', 'INPlacemarkResolutionResult', 'INPlayMediaIntent', 'INPlayMediaIntentResponse', 'INPlayMediaMediaItemResolutionResult', 'INPlayMediaPlaybackSpeedResolutionResult', 'INPlaybackQueueLocationResolutionResult', 'INPlaybackRepeatModeResolutionResult', 'INPreferences', 'INPriceRange', 'INRadioTypeResolutionResult', 'INRecurrenceRule', 'INRelativeReferenceResolutionResult', 'INRelativeSettingResolutionResult', 'INRelevanceProvider', 'INRelevantShortcut', 'INRelevantShortcutStore', 'INRentalCar', 'INRentalCarReservation', 'INRequestPaymentCurrencyAmountResolutionResult', 'INRequestPaymentIntent', 'INRequestPaymentIntentResponse', 'INRequestPaymentPayerResolutionResult', 'INRequestRideIntent', 'INRequestRideIntentResponse', 'INReservation', 'INReservationAction', 'INRestaurant', 'INRestaurantGuest', 'INRestaurantGuestDisplayPreferences', 'INRestaurantGuestResolutionResult', 'INRestaurantOffer', 'INRestaurantReservation', 'INRestaurantReservationBooking', 'INRestaurantReservationUserBooking', 'INRestaurantResolutionResult', 'INResumeWorkoutIntent', 'INResumeWorkoutIntentResponse', 'INRideCompletionStatus', 'INRideDriver', 'INRideFareLineItem', 'INRideOption', 'INRidePartySizeOption', 'INRideStatus', 'INRideVehicle', 'INSaveProfileInCarIntent', 'INSaveProfileInCarIntentResponse', 'INSearchCallHistoryIntent', 'INSearchCallHistoryIntentResponse', 'INSearchForAccountsIntent', 'INSearchForAccountsIntentResponse', 'INSearchForBillsIntent', 'INSearchForBillsIntentResponse', 'INSearchForMediaIntent', 'INSearchForMediaIntentResponse', 'INSearchForMediaMediaItemResolutionResult', 'INSearchForMessagesIntent', 'INSearchForMessagesIntentResponse', 'INSearchForNotebookItemsIntent', 'INSearchForNotebookItemsIntentResponse', 'INSearchForPhotosIntent', 'INSearchForPhotosIntentResponse', 'INSeat', 'INSendMessageAttachment', 'INSendMessageIntent', 'INSendMessageIntentResponse', 'INSendMessageRecipientResolutionResult', 'INSendPaymentCurrencyAmountResolutionResult', 'INSendPaymentIntent', 'INSendPaymentIntentResponse', 'INSendPaymentPayeeResolutionResult', 'INSendRideFeedbackIntent', 'INSendRideFeedbackIntentResponse', 'INSetAudioSourceInCarIntent', 'INSetAudioSourceInCarIntentResponse', 'INSetCarLockStatusIntent', 'INSetCarLockStatusIntentResponse', 'INSetClimateSettingsInCarIntent', 'INSetClimateSettingsInCarIntentResponse', 'INSetDefrosterSettingsInCarIntent', 
'INSetDefrosterSettingsInCarIntentResponse', 'INSetMessageAttributeIntent', 'INSetMessageAttributeIntentResponse', 'INSetProfileInCarIntent', 'INSetProfileInCarIntentResponse', 'INSetRadioStationIntent', 'INSetRadioStationIntentResponse', 'INSetSeatSettingsInCarIntent', 'INSetSeatSettingsInCarIntentResponse', 'INSetTaskAttributeIntent', 'INSetTaskAttributeIntentResponse', 'INSetTaskAttributeTemporalEventTriggerResolutionResult', 'INShortcut', 'INSnoozeTasksIntent', 'INSnoozeTasksIntentResponse', 'INSnoozeTasksTaskResolutionResult', 'INSpatialEventTrigger', 'INSpatialEventTriggerResolutionResult', 'INSpeakableString', 'INSpeakableStringResolutionResult', 'INSpeedResolutionResult', 'INStartAudioCallIntent', 'INStartAudioCallIntentResponse', 'INStartCallCallCapabilityResolutionResult', 'INStartCallCallRecordToCallBackResolutionResult', 'INStartCallContactResolutionResult', 'INStartCallIntent', 'INStartCallIntentResponse', 'INStartPhotoPlaybackIntent', 'INStartPhotoPlaybackIntentResponse', 'INStartVideoCallIntent', 'INStartVideoCallIntentResponse', 'INStartWorkoutIntent', 'INStartWorkoutIntentResponse', 'INStringResolutionResult', 'INTask', 'INTaskList', 'INTaskListResolutionResult', 'INTaskPriorityResolutionResult', 'INTaskResolutionResult', 'INTaskStatusResolutionResult', 'INTemperatureResolutionResult', 'INTemporalEventTrigger', 'INTemporalEventTriggerResolutionResult', 'INTemporalEventTriggerTypeOptionsResolutionResult', 'INTermsAndConditions', 'INTextNoteContent', 'INTicketedEvent', 'INTicketedEventReservation', 'INTimeIntervalResolutionResult', 'INTrainReservation', 'INTrainTrip', 'INTransferMoneyIntent', 'INTransferMoneyIntentResponse', 'INUIAddVoiceShortcutButton', 'INUIAddVoiceShortcutViewController', 'INUIEditVoiceShortcutViewController', 'INURLResolutionResult', 'INUpcomingMediaManager', 'INUpdateMediaAffinityIntent', 'INUpdateMediaAffinityIntentResponse', 'INUpdateMediaAffinityMediaItemResolutionResult', 'INUserContext', 'INVisualCodeTypeResolutionResult', 'INVocabulary', 'INVoiceShortcut', 'INVoiceShortcutCenter', 'INVolumeResolutionResult', 'INWorkoutGoalUnitTypeResolutionResult', 'INWorkoutLocationTypeResolutionResult', 'IOSurface', 'JSContext', 'JSManagedValue', 'JSValue', 'JSVirtualMachine', 'LAContext', 'LPLinkMetadata', 'LPLinkView', 'LPMetadataProvider', 'MCAdvertiserAssistant', 'MCBrowserViewController', 'MCNearbyServiceAdvertiser', 'MCNearbyServiceBrowser', 'MCPeerID', 'MCSession', 'MDLAnimatedMatrix4x4', 'MDLAnimatedQuaternion', 'MDLAnimatedQuaternionArray', 'MDLAnimatedScalar', 'MDLAnimatedScalarArray', 'MDLAnimatedValue', 'MDLAnimatedVector2', 'MDLAnimatedVector3', 'MDLAnimatedVector3Array', 'MDLAnimatedVector4', 'MDLAnimationBindComponent', 'MDLAreaLight', 'MDLAsset', 'MDLBundleAssetResolver', 'MDLCamera', 'MDLCheckerboardTexture', 'MDLColorSwatchTexture', 'MDLLight', 'MDLLightProbe', 'MDLMaterial', 'MDLMaterialProperty', 'MDLMaterialPropertyConnection', 'MDLMaterialPropertyGraph', 'MDLMaterialPropertyNode', 'MDLMatrix4x4Array', 'MDLMesh', 'MDLMeshBufferData', 'MDLMeshBufferDataAllocator', 'MDLMeshBufferMap', 'MDLMeshBufferZoneDefault', 'MDLNoiseTexture', 'MDLNormalMapTexture', 'MDLObject', 'MDLObjectContainer', 'MDLPackedJointAnimation', 'MDLPathAssetResolver', 'MDLPhotometricLight', 'MDLPhysicallyPlausibleLight', 'MDLPhysicallyPlausibleScatteringFunction', 'MDLRelativeAssetResolver', 'MDLScatteringFunction', 'MDLSkeleton', 'MDLSkyCubeTexture', 'MDLStereoscopicCamera', 'MDLSubmesh', 'MDLSubmeshTopology', 'MDLTexture', 'MDLTextureFilter', 'MDLTextureSampler', 
'MDLTransform', 'MDLTransformMatrixOp', 'MDLTransformOrientOp', 'MDLTransformRotateOp', 'MDLTransformRotateXOp', 'MDLTransformRotateYOp', 'MDLTransformRotateZOp', 'MDLTransformScaleOp', 'MDLTransformStack', 'MDLTransformTranslateOp', 'MDLURLTexture', 'MDLVertexAttribute', 'MDLVertexAttributeData', 'MDLVertexBufferLayout', 'MDLVertexDescriptor', 'MDLVoxelArray', 'MFMailComposeViewController', 'MFMessageComposeViewController', 'MIDICIDeviceInfo', 'MIDICIDiscoveredNode', 'MIDICIDiscoveryManager', 'MIDICIProfile', 'MIDICIProfileState', 'MIDICIResponder', 'MIDICISession', 'MIDINetworkConnection', 'MIDINetworkHost', 'MIDINetworkSession', 'MKAnnotationView', 'MKCircle', 'MKCircleRenderer', 'MKCircleView', 'MKClusterAnnotation', 'MKCompassButton', 'MKDirections', 'MKDirectionsRequest', 'MKDirectionsResponse', 'MKDistanceFormatter', 'MKETAResponse', 'MKGeoJSONDecoder', 'MKGeoJSONFeature', 'MKGeodesicPolyline', 'MKGradientPolylineRenderer', 'MKLocalPointsOfInterestRequest', 'MKLocalSearch', 'MKLocalSearchCompleter', 'MKLocalSearchCompletion', 'MKLocalSearchRequest', 'MKLocalSearchResponse', 'MKMapCamera', 'MKMapCameraBoundary', 'MKMapCameraZoomRange', 'MKMapItem', 'MKMapSnapshot', 'MKMapSnapshotOptions', 'MKMapSnapshotter', 'MKMapView', 'MKMarkerAnnotationView', 'MKMultiPoint', 'MKMultiPolygon', 'MKMultiPolygonRenderer', 'MKMultiPolyline', 'MKMultiPolylineRenderer', 'MKOverlayPathRenderer', 'MKOverlayPathView', 'MKOverlayRenderer', 'MKOverlayView', 'MKPinAnnotationView', 'MKPitchControl', 'MKPlacemark', 'MKPointAnnotation', 'MKPointOfInterestFilter', 'MKPolygon', 'MKPolygonRenderer', 'MKPolygonView', 'MKPolyline', 'MKPolylineRenderer', 'MKPolylineView', 'MKReverseGeocoder', 'MKRoute', 'MKRouteStep', 'MKScaleView', 'MKShape', 'MKTileOverlay', 'MKTileOverlayRenderer', 'MKUserLocation', 'MKUserLocationView', 'MKUserTrackingBarButtonItem', 'MKUserTrackingButton', 'MKZoomControl', 'MLArrayBatchProvider', 'MLCActivationDescriptor', 'MLCActivationLayer', 'MLCArithmeticLayer', 'MLCBatchNormalizationLayer', 'MLCConcatenationLayer', 'MLCConvolutionDescriptor', 'MLCConvolutionLayer', 'MLCDevice', 'MLCDropoutLayer', 'MLCEmbeddingDescriptor', 'MLCEmbeddingLayer', 'MLCFullyConnectedLayer', 'MLCGramMatrixLayer', 'MLCGraph', 'MLCGroupNormalizationLayer', 'MLCInferenceGraph', 'MLCInstanceNormalizationLayer', 'MLCLSTMDescriptor', 'MLCLSTMLayer', 'MLCLayer', 'MLCLayerNormalizationLayer', 'MLCLossDescriptor', 'MLCLossLayer', 'MLCMatMulDescriptor', 'MLCMatMulLayer', 'MLCMultiheadAttentionDescriptor', 'MLCMultiheadAttentionLayer', 'MLCPaddingLayer', 'MLCPoolingDescriptor', 'MLCPoolingLayer', 'MLCReductionLayer', 'MLCReshapeLayer', 'MLCSliceLayer', 'MLCSoftmaxLayer', 'MLCSplitLayer', 'MLCTensor', 'MLCTensorData', 'MLCTensorDescriptor', 'MLCTensorOptimizerDeviceData', 'MLCTensorParameter', 'MLCTrainingGraph', 'MLCTransposeLayer', 'MLCUpsampleLayer', 'MLCYOLOLossDescriptor', 'MLCYOLOLossLayer', 'MLDictionaryConstraint', 'MLDictionaryFeatureProvider', 'MLFeatureDescription', 'MLFeatureValue', 'MLImageConstraint', 'MLImageSize', 'MLImageSizeConstraint', 'MLKey', 'MLMetricKey', 'MLModel', 'MLModelCollection', 'MLModelCollectionEntry', 'MLModelConfiguration', 'MLModelDescription', 'MLMultiArray', 'MLMultiArrayConstraint', 'MLMultiArrayShapeConstraint', 'MLNumericConstraint', 'MLParameterDescription', 'MLParameterKey', 'MLPredictionOptions', 'MLSequence', 'MLSequenceConstraint', 'MLTask', 'MLUpdateContext', 'MLUpdateProgressHandlers', 'MLUpdateTask', 'MPChangeLanguageOptionCommandEvent', 'MPChangePlaybackPositionCommand', 
'MPChangePlaybackPositionCommandEvent', 'MPChangePlaybackRateCommand', 'MPChangePlaybackRateCommandEvent', 'MPChangeRepeatModeCommand', 'MPChangeRepeatModeCommandEvent', 'MPChangeShuffleModeCommand', 'MPChangeShuffleModeCommandEvent', 'MPContentItem', 'MPFeedbackCommand', 'MPFeedbackCommandEvent', 'MPMediaEntity', 'MPMediaItem', 'MPMediaItemArtwork', 'MPMediaItemCollection', 'MPMediaLibrary', 'MPMediaPickerController', 'MPMediaPlaylist', 'MPMediaPlaylistCreationMetadata', 'MPMediaPredicate', 'MPMediaPropertyPredicate', 'MPMediaQuery', 'MPMediaQuerySection', 'MPMovieAccessLog', 'MPMovieAccessLogEvent', 'MPMovieErrorLog', 'MPMovieErrorLogEvent', 'MPMoviePlayerController', 'MPMoviePlayerViewController', 'MPMusicPlayerApplicationController', 'MPMusicPlayerController', 'MPMusicPlayerControllerMutableQueue', 'MPMusicPlayerControllerQueue', 'MPMusicPlayerMediaItemQueueDescriptor', 'MPMusicPlayerPlayParameters', 'MPMusicPlayerPlayParametersQueueDescriptor', 'MPMusicPlayerQueueDescriptor', 'MPMusicPlayerStoreQueueDescriptor', 'MPNowPlayingInfoCenter', 'MPNowPlayingInfoLanguageOption', 'MPNowPlayingInfoLanguageOptionGroup', 'MPNowPlayingSession', 'MPPlayableContentManager', 'MPPlayableContentManagerContext', 'MPRatingCommand', 'MPRatingCommandEvent', 'MPRemoteCommand', 'MPRemoteCommandCenter', 'MPRemoteCommandEvent', 'MPSGraph', 'MPSGraphConvolution2DOpDescriptor', 'MPSGraphDepthwiseConvolution2DOpDescriptor', 'MPSGraphDevice', 'MPSGraphExecutionDescriptor', 'MPSGraphOperation', 'MPSGraphPooling2DOpDescriptor', 'MPSGraphShapedType', 'MPSGraphTensor', 'MPSGraphTensorData', 'MPSGraphVariableOp', 'MPSeekCommandEvent', 'MPSkipIntervalCommand', 'MPSkipIntervalCommandEvent', 'MPTimedMetadata', 'MPVolumeView', 'MSConversation', 'MSMessage', 'MSMessageLayout', 'MSMessageLiveLayout', 'MSMessageTemplateLayout', 'MSMessagesAppViewController', 'MSServiceAccount', 'MSSession', 'MSSetupSession', 'MSSticker', 'MSStickerBrowserView', 'MSStickerBrowserViewController', 'MSStickerView', 'MTKMesh', 'MTKMeshBuffer', 'MTKMeshBufferAllocator', 'MTKSubmesh', 'MTKTextureLoader', 'MTKView', 'MTLAccelerationStructureBoundingBoxGeometryDescriptor', 'MTLAccelerationStructureDescriptor', 'MTLAccelerationStructureGeometryDescriptor', 'MTLAccelerationStructureTriangleGeometryDescriptor', 'MTLArgument', 'MTLArgumentDescriptor', 'MTLArrayType', 'MTLAttribute', 'MTLAttributeDescriptor', 'MTLAttributeDescriptorArray', 'MTLBinaryArchiveDescriptor', 'MTLBlitPassDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptorArray', 'MTLBufferLayoutDescriptor', 'MTLBufferLayoutDescriptorArray', 'MTLCaptureDescriptor', 'MTLCaptureManager', 'MTLCommandBufferDescriptor', 'MTLCompileOptions', 'MTLComputePassDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptorArray', 'MTLComputePipelineDescriptor', 'MTLComputePipelineReflection', 'MTLCounterSampleBufferDescriptor', 'MTLDepthStencilDescriptor', 'MTLFunctionConstant', 'MTLFunctionConstantValues', 'MTLFunctionDescriptor', 'MTLHeapDescriptor', 'MTLIndirectCommandBufferDescriptor', 'MTLInstanceAccelerationStructureDescriptor', 'MTLIntersectionFunctionDescriptor', 'MTLIntersectionFunctionTableDescriptor', 'MTLLinkedFunctions', 'MTLPipelineBufferDescriptor', 'MTLPipelineBufferDescriptorArray', 'MTLPointerType', 'MTLPrimitiveAccelerationStructureDescriptor', 'MTLRasterizationRateLayerArray', 'MTLRasterizationRateLayerDescriptor', 'MTLRasterizationRateMapDescriptor', 'MTLRasterizationRateSampleArray', 
'MTLRenderPassAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptorArray', 'MTLRenderPassDepthAttachmentDescriptor', 'MTLRenderPassDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptorArray', 'MTLRenderPassStencilAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MTLRenderPipelineDescriptor', 'MTLRenderPipelineReflection', 'MTLResourceStatePassDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptorArray', 'MTLSamplerDescriptor', 'MTLSharedEventHandle', 'MTLSharedEventListener', 'MTLSharedTextureHandle', 'MTLStageInputOutputDescriptor', 'MTLStencilDescriptor', 'MTLStructMember', 'MTLStructType', 'MTLTextureDescriptor', 'MTLTextureReferenceType', 'MTLTileRenderPipelineColorAttachmentDescriptor', 'MTLTileRenderPipelineColorAttachmentDescriptorArray', 'MTLTileRenderPipelineDescriptor', 'MTLType', 'MTLVertexAttribute', 'MTLVertexAttributeDescriptor', 'MTLVertexAttributeDescriptorArray', 'MTLVertexBufferLayoutDescriptor', 'MTLVertexBufferLayoutDescriptorArray', 'MTLVertexDescriptor', 'MTLVisibleFunctionTableDescriptor', 'MXAnimationMetric', 'MXAppExitMetric', 'MXAppLaunchMetric', 'MXAppResponsivenessMetric', 'MXAppRunTimeMetric', 'MXAverage', 'MXBackgroundExitData', 'MXCPUExceptionDiagnostic', 'MXCPUMetric', 'MXCallStackTree', 'MXCellularConditionMetric', 'MXCrashDiagnostic', 'MXDiagnostic', 'MXDiagnosticPayload', 'MXDiskIOMetric', 'MXDiskWriteExceptionDiagnostic', 'MXDisplayMetric', 'MXForegroundExitData', 'MXGPUMetric', 'MXHangDiagnostic', 'MXHistogram', 'MXHistogramBucket', 'MXLocationActivityMetric', 'MXMemoryMetric', 'MXMetaData', 'MXMetric', 'MXMetricManager', 'MXMetricPayload', 'MXNetworkTransferMetric', 'MXSignpostIntervalData', 'MXSignpostMetric', 'MXUnitAveragePixelLuminance', 'MXUnitSignalBars', 'MyClass', 'NCWidgetController', 'NEAppProxyFlow', 'NEAppProxyProvider', 'NEAppProxyProviderManager', 'NEAppProxyTCPFlow', 'NEAppProxyUDPFlow', 'NEAppPushManager', 'NEAppPushProvider', 'NEAppRule', 'NEDNSOverHTTPSSettings', 'NEDNSOverTLSSettings', 'NEDNSProxyManager', 'NEDNSProxyProvider', 'NEDNSProxyProviderProtocol', 'NEDNSSettings', 'NEDNSSettingsManager', 'NEEvaluateConnectionRule', 'NEFilterBrowserFlow', 'NEFilterControlProvider', 'NEFilterControlVerdict', 'NEFilterDataProvider', 'NEFilterDataVerdict', 'NEFilterFlow', 'NEFilterManager', 'NEFilterNewFlowVerdict', 'NEFilterPacketContext', 'NEFilterPacketProvider', 'NEFilterProvider', 'NEFilterProviderConfiguration', 'NEFilterRemediationVerdict', 'NEFilterReport', 'NEFilterRule', 'NEFilterSettings', 'NEFilterSocketFlow', 'NEFilterVerdict', 'NEFlowMetaData', 'NEHotspotConfiguration', 'NEHotspotConfigurationManager', 'NEHotspotEAPSettings', 'NEHotspotHS20Settings', 'NEHotspotHelper', 'NEHotspotHelperCommand', 'NEHotspotHelperResponse', 'NEHotspotNetwork', 'NEIPv4Route', 'NEIPv4Settings', 'NEIPv6Route', 'NEIPv6Settings', 'NENetworkRule', 'NEOnDemandRule', 'NEOnDemandRuleConnect', 'NEOnDemandRuleDisconnect', 'NEOnDemandRuleEvaluateConnection', 'NEOnDemandRuleIgnore', 'NEPacket', 'NEPacketTunnelFlow', 'NEPacketTunnelNetworkSettings', 'NEPacketTunnelProvider', 'NEProvider', 'NEProxyServer', 'NEProxySettings', 'NETransparentProxyManager', 'NETransparentProxyNetworkSettings', 'NETransparentProxyProvider', 'NETunnelNetworkSettings', 'NETunnelProvider', 'NETunnelProviderManager', 'NETunnelProviderProtocol', 
'NETunnelProviderSession', 'NEVPNConnection', 'NEVPNIKEv2SecurityAssociationParameters', 'NEVPNManager', 'NEVPNProtocol', 'NEVPNProtocolIKEv2', 'NEVPNProtocolIPSec', 'NFCISO15693CustomCommandConfiguration', 'NFCISO15693ReadMultipleBlocksConfiguration', 'NFCISO15693ReaderSession', 'NFCISO7816APDU', 'NFCNDEFMessage', 'NFCNDEFPayload', 'NFCNDEFReaderSession', 'NFCReaderSession', 'NFCTagCommandConfiguration', 'NFCTagReaderSession', 'NFCVASCommandConfiguration', 'NFCVASReaderSession', 'NFCVASResponse', 'NIConfiguration', 'NIDiscoveryToken', 'NINearbyObject', 'NINearbyPeerConfiguration', 'NISession', 'NKAssetDownload', 'NKIssue', 'NKLibrary', 'NLEmbedding', 'NLGazetteer', 'NLLanguageRecognizer', 'NLModel', 'NLModelConfiguration', 'NLTagger', 'NLTokenizer', 'NSArray', 'NSAssertionHandler', 'NSAsynchronousFetchRequest', 'NSAsynchronousFetchResult', 'NSAtomicStore', 'NSAtomicStoreCacheNode', 'NSAttributeDescription', 'NSAttributedString', 'NSAutoreleasePool', 'NSBatchDeleteRequest', 'NSBatchDeleteResult', 'NSBatchInsertRequest', 'NSBatchInsertResult', 'NSBatchUpdateRequest', 'NSBatchUpdateResult', 'NSBlockOperation', 'NSBundle', 'NSBundleResourceRequest', 'NSByteCountFormatter', 'NSCache', 'NSCachedURLResponse', 'NSCalendar', 'NSCharacterSet', 'NSCoder', 'NSCollectionLayoutAnchor', 'NSCollectionLayoutBoundarySupplementaryItem', 'NSCollectionLayoutDecorationItem', 'NSCollectionLayoutDimension', 'NSCollectionLayoutEdgeSpacing', 'NSCollectionLayoutGroup', 'NSCollectionLayoutGroupCustomItem', 'NSCollectionLayoutItem', 'NSCollectionLayoutSection', 'NSCollectionLayoutSize', 'NSCollectionLayoutSpacing', 'NSCollectionLayoutSupplementaryItem', 'NSComparisonPredicate', 'NSCompoundPredicate', 'NSCondition', 'NSConditionLock', 'NSConstantString', 'NSConstraintConflict', 'NSCoreDataCoreSpotlightDelegate', 'NSCountedSet', 'NSData', 'NSDataAsset', 'NSDataDetector', 'NSDate', 'NSDateComponents', 'NSDateComponentsFormatter', 'NSDateFormatter', 'NSDateInterval', 'NSDateIntervalFormatter', 'NSDecimalNumber', 'NSDecimalNumberHandler', 'NSDerivedAttributeDescription', 'NSDictionary', 'NSDiffableDataSourceSectionSnapshot', 'NSDiffableDataSourceSectionTransaction', 'NSDiffableDataSourceSnapshot', 'NSDiffableDataSourceTransaction', 'NSDimension', 'NSDirectoryEnumerator', 'NSEnergyFormatter', 'NSEntityDescription', 'NSEntityMapping', 'NSEntityMigrationPolicy', 'NSEnumerator', 'NSError', 'NSEvent', 'NSException', 'NSExpression', 'NSExpressionDescription', 'NSExtensionContext', 'NSExtensionItem', 'NSFetchIndexDescription', 'NSFetchIndexElementDescription', 'NSFetchRequest', 'NSFetchRequestExpression', 'NSFetchedPropertyDescription', 'NSFetchedResultsController', 'NSFileAccessIntent', 'NSFileCoordinator', 'NSFileHandle', 'NSFileManager', 'NSFileProviderDomain', 'NSFileProviderExtension', 'NSFileProviderManager', 'NSFileProviderService', 'NSFileSecurity', 'NSFileVersion', 'NSFileWrapper', 'NSFormatter', 'NSHTTPCookie', 'NSHTTPCookieStorage', 'NSHTTPURLResponse', 'NSHashTable', 'NSISO8601DateFormatter', 'NSIncrementalStore', 'NSIncrementalStoreNode', 'NSIndexPath', 'NSIndexSet', 'NSInputStream', 'NSInvocation', 'NSInvocationOperation', 'NSItemProvider', 'NSJSONSerialization', 'NSKeyedArchiver', 'NSKeyedUnarchiver', 'NSLayoutAnchor', 'NSLayoutConstraint', 'NSLayoutDimension', 'NSLayoutManager', 'NSLayoutXAxisAnchor', 'NSLayoutYAxisAnchor', 'NSLengthFormatter', 'NSLinguisticTagger', 'NSListFormatter', 'NSLocale', 'NSLock', 'NSMachPort', 'NSManagedObject', 'NSManagedObjectContext', 'NSManagedObjectID', 'NSManagedObjectModel', 
'NSMapTable', 'NSMappingModel', 'NSMassFormatter', 'NSMeasurement', 'NSMeasurementFormatter', 'NSMenuToolbarItem', 'NSMergeConflict', 'NSMergePolicy', 'NSMessagePort', 'NSMetadataItem', 'NSMetadataQuery', 'NSMetadataQueryAttributeValueTuple', 'NSMetadataQueryResultGroup', 'NSMethodSignature', 'NSMigrationManager', 'NSMutableArray', 'NSMutableAttributedString', 'NSMutableCharacterSet', 'NSMutableData', 'NSMutableDictionary', 'NSMutableIndexSet', 'NSMutableOrderedSet', 'NSMutableParagraphStyle', 'NSMutableSet', 'NSMutableString', 'NSMutableURLRequest', 'NSNetService', 'NSNetServiceBrowser', 'NSNotification', 'NSNotificationCenter', 'NSNotificationQueue', 'NSNull', 'NSNumber', 'NSNumberFormatter', 'NSObject', 'NSOperation', 'NSOperationQueue', 'NSOrderedCollectionChange', 'NSOrderedCollectionDifference', 'NSOrderedSet', 'NSOrthography', 'NSOutputStream', 'NSParagraphStyle', 'NSPersistentCloudKitContainer', 'NSPersistentCloudKitContainerEvent', 'NSPersistentCloudKitContainerEventRequest', 'NSPersistentCloudKitContainerEventResult', 'NSPersistentCloudKitContainerOptions', 'NSPersistentContainer', 'NSPersistentHistoryChange', 'NSPersistentHistoryChangeRequest', 'NSPersistentHistoryResult', 'NSPersistentHistoryToken', 'NSPersistentHistoryTransaction', 'NSPersistentStore', 'NSPersistentStoreAsynchronousResult', 'NSPersistentStoreCoordinator', 'NSPersistentStoreDescription', 'NSPersistentStoreRequest', 'NSPersistentStoreResult', 'NSPersonNameComponents', 'NSPersonNameComponentsFormatter', 'NSPipe', 'NSPointerArray', 'NSPointerFunctions', 'NSPort', 'NSPredicate', 'NSProcessInfo', 'NSProgress', 'NSPropertyDescription', 'NSPropertyListSerialization', 'NSPropertyMapping', 'NSProxy', 'NSPurgeableData', 'NSQueryGenerationToken', 'NSRecursiveLock', 'NSRegularExpression', 'NSRelationshipDescription', 'NSRelativeDateTimeFormatter', 'NSRunLoop', 'NSSaveChangesRequest', 'NSScanner', 'NSSecureUnarchiveFromDataTransformer', 'NSSet', 'NSShadow', 'NSSharingServicePickerToolbarItem', 'NSSharingServicePickerTouchBarItem', 'NSSimpleCString', 'NSSocketPort', 'NSSortDescriptor', 'NSStream', 'NSString', 'NSStringDrawingContext', 'NSTextAttachment', 'NSTextCheckingResult', 'NSTextContainer', 'NSTextStorage', 'NSTextTab', 'NSThread', 'NSTimeZone', 'NSTimer', 'NSToolbarItem', 'NSURL', 'NSURLAuthenticationChallenge', 'NSURLCache', 'NSURLComponents', 'NSURLConnection', 'NSURLCredential', 'NSURLCredentialStorage', 'NSURLProtectionSpace', 'NSURLProtocol', 'NSURLQueryItem', 'NSURLRequest', 'NSURLResponse', 'NSURLSession', 'NSURLSessionConfiguration', 'NSURLSessionDataTask', 'NSURLSessionDownloadTask', 'NSURLSessionStreamTask', 'NSURLSessionTask', 'NSURLSessionTaskMetrics', 'NSURLSessionTaskTransactionMetrics', 'NSURLSessionUploadTask', 'NSURLSessionWebSocketMessage', 'NSURLSessionWebSocketTask', 'NSUUID', 'NSUbiquitousKeyValueStore', 'NSUndoManager', 'NSUnit', 'NSUnitAcceleration', 'NSUnitAngle', 'NSUnitArea', 'NSUnitConcentrationMass', 'NSUnitConverter', 'NSUnitConverterLinear', 'NSUnitDispersion', 'NSUnitDuration', 'NSUnitElectricCharge', 'NSUnitElectricCurrent', 'NSUnitElectricPotentialDifference', 'NSUnitElectricResistance', 'NSUnitEnergy', 'NSUnitFrequency', 'NSUnitFuelEfficiency', 'NSUnitIlluminance', 'NSUnitInformationStorage', 'NSUnitLength', 'NSUnitMass', 'NSUnitPower', 'NSUnitPressure', 'NSUnitSpeed', 'NSUnitTemperature', 'NSUnitVolume', 'NSUserActivity', 'NSUserDefaults', 'NSValue', 'NSValueTransformer', 'NSXMLParser', 'NSXPCCoder', 'NSXPCConnection', 'NSXPCInterface', 'NSXPCListener', 'NSXPCListenerEndpoint', 
'NWBonjourServiceEndpoint', 'NWEndpoint', 'NWHostEndpoint', 'NWPath', 'NWTCPConnection', 'NWTLSParameters', 'NWUDPSession', 'OSLogEntry', 'OSLogEntryActivity', 'OSLogEntryBoundary', 'OSLogEntryLog', 'OSLogEntrySignpost', 'OSLogEnumerator', 'OSLogMessageComponent', 'OSLogPosition', 'OSLogStore', 'PDFAction', 'PDFActionGoTo', 'PDFActionNamed', 'PDFActionRemoteGoTo', 'PDFActionResetForm', 'PDFActionURL', 'PDFAnnotation', 'PDFAppearanceCharacteristics', 'PDFBorder', 'PDFDestination', 'PDFDocument', 'PDFOutline', 'PDFPage', 'PDFSelection', 'PDFThumbnailView', 'PDFView', 'PHAdjustmentData', 'PHAsset', 'PHAssetChangeRequest', 'PHAssetCollection', 'PHAssetCollectionChangeRequest', 'PHAssetCreationRequest', 'PHAssetResource', 'PHAssetResourceCreationOptions', 'PHAssetResourceManager', 'PHAssetResourceRequestOptions', 'PHCachingImageManager', 'PHChange', 'PHChangeRequest', 'PHCloudIdentifier', 'PHCollection', 'PHCollectionList', 'PHCollectionListChangeRequest', 'PHContentEditingInput', 'PHContentEditingInputRequestOptions', 'PHContentEditingOutput', 'PHEditingExtensionContext', 'PHFetchOptions', 'PHFetchResult', 'PHFetchResultChangeDetails', 'PHImageManager', 'PHImageRequestOptions', 'PHLivePhoto', 'PHLivePhotoEditingContext', 'PHLivePhotoRequestOptions', 'PHLivePhotoView', 'PHObject', 'PHObjectChangeDetails', 'PHObjectPlaceholder', 'PHPhotoLibrary', 'PHPickerConfiguration', 'PHPickerFilter', 'PHPickerResult', 'PHPickerViewController', 'PHProject', 'PHProjectChangeRequest', 'PHVideoRequestOptions', 'PKAddCarKeyPassConfiguration', 'PKAddPassButton', 'PKAddPassesViewController', 'PKAddPaymentPassRequest', 'PKAddPaymentPassRequestConfiguration', 'PKAddPaymentPassViewController', 'PKAddSecureElementPassConfiguration', 'PKAddSecureElementPassViewController', 'PKAddShareablePassConfiguration', 'PKBarcodeEventConfigurationRequest', 'PKBarcodeEventMetadataRequest', 'PKBarcodeEventMetadataResponse', 'PKBarcodeEventSignatureRequest', 'PKBarcodeEventSignatureResponse', 'PKCanvasView', 'PKContact', 'PKDisbursementAuthorizationController', 'PKDisbursementRequest', 'PKDisbursementVoucher', 'PKDrawing', 'PKEraserTool', 'PKFloatRange', 'PKInk', 'PKInkingTool', 'PKIssuerProvisioningExtensionHandler', 'PKIssuerProvisioningExtensionPassEntry', 'PKIssuerProvisioningExtensionPaymentPassEntry', 'PKIssuerProvisioningExtensionStatus', 'PKLabeledValue', 'PKLassoTool', 'PKObject', 'PKPass', 'PKPassLibrary', 'PKPayment', 'PKPaymentAuthorizationController', 'PKPaymentAuthorizationResult', 'PKPaymentAuthorizationViewController', 'PKPaymentButton', 'PKPaymentInformationEventExtension', 'PKPaymentMerchantSession', 'PKPaymentMethod', 'PKPaymentPass', 'PKPaymentRequest', 'PKPaymentRequestMerchantSessionUpdate', 'PKPaymentRequestPaymentMethodUpdate', 'PKPaymentRequestShippingContactUpdate', 'PKPaymentRequestShippingMethodUpdate', 'PKPaymentRequestUpdate', 'PKPaymentSummaryItem', 'PKPaymentToken', 'PKPushCredentials', 'PKPushPayload', 'PKPushRegistry', 'PKSecureElementPass', 'PKShareablePassMetadata', 'PKShippingMethod', 'PKStroke', 'PKStrokePath', 'PKStrokePoint', 'PKSuicaPassProperties', 'PKTool', 'PKToolPicker', 'PKTransitPassProperties', 'QLFileThumbnailRequest', 'QLPreviewController', 'QLThumbnailGenerationRequest', 'QLThumbnailGenerator', 'QLThumbnailProvider', 'QLThumbnailReply', 'QLThumbnailRepresentation', 'RPBroadcastActivityController', 'RPBroadcastActivityViewController', 'RPBroadcastConfiguration', 'RPBroadcastController', 'RPBroadcastHandler', 'RPBroadcastMP4ClipHandler', 'RPBroadcastSampleHandler', 
'RPPreviewViewController', 'RPScreenRecorder', 'RPSystemBroadcastPickerView', 'SCNAccelerationConstraint', 'SCNAction', 'SCNAnimation', 'SCNAnimationEvent', 'SCNAnimationPlayer', 'SCNAudioPlayer', 'SCNAudioSource', 'SCNAvoidOccluderConstraint', 'SCNBillboardConstraint', 'SCNBox', 'SCNCamera', 'SCNCameraController', 'SCNCapsule', 'SCNCone', 'SCNConstraint', 'SCNCylinder', 'SCNDistanceConstraint', 'SCNFloor', 'SCNGeometry', 'SCNGeometryElement', 'SCNGeometrySource', 'SCNGeometryTessellator', 'SCNHitTestResult', 'SCNIKConstraint', 'SCNLevelOfDetail', 'SCNLight', 'SCNLookAtConstraint', 'SCNMaterial', 'SCNMaterialProperty', 'SCNMorpher', 'SCNNode', 'SCNParticlePropertyController', 'SCNParticleSystem', 'SCNPhysicsBallSocketJoint', 'SCNPhysicsBehavior', 'SCNPhysicsBody', 'SCNPhysicsConeTwistJoint', 'SCNPhysicsContact', 'SCNPhysicsField', 'SCNPhysicsHingeJoint', 'SCNPhysicsShape', 'SCNPhysicsSliderJoint', 'SCNPhysicsVehicle', 'SCNPhysicsVehicleWheel', 'SCNPhysicsWorld', 'SCNPlane', 'SCNProgram', 'SCNPyramid', 'SCNReferenceNode', 'SCNRenderer', 'SCNReplicatorConstraint', 'SCNScene', 'SCNSceneSource', 'SCNShape', 'SCNSkinner', 'SCNSliderConstraint', 'SCNSphere', 'SCNTechnique', 'SCNText', 'SCNTimingFunction', 'SCNTorus', 'SCNTransaction', 'SCNTransformConstraint', 'SCNTube', 'SCNView', 'SFAcousticFeature', 'SFAuthenticationSession', 'SFContentBlockerManager', 'SFContentBlockerState', 'SFSafariViewController', 'SFSafariViewControllerConfiguration', 'SFSpeechAudioBufferRecognitionRequest', 'SFSpeechRecognitionRequest', 'SFSpeechRecognitionResult', 'SFSpeechRecognitionTask', 'SFSpeechRecognizer', 'SFSpeechURLRecognitionRequest', 'SFTranscription', 'SFTranscriptionSegment', 'SFVoiceAnalytics', 'SK3DNode', 'SKAction', 'SKAdNetwork', 'SKArcadeService', 'SKAttribute', 'SKAttributeValue', 'SKAudioNode', 'SKCameraNode', 'SKCloudServiceController', 'SKCloudServiceSetupViewController', 'SKConstraint', 'SKCropNode', 'SKDownload', 'SKEffectNode', 'SKEmitterNode', 'SKFieldNode', 'SKKeyframeSequence', 'SKLabelNode', 'SKLightNode', 'SKMutablePayment', 'SKMutableTexture', 'SKNode', 'SKOverlay', 'SKOverlayAppClipConfiguration', 'SKOverlayAppConfiguration', 'SKOverlayConfiguration', 'SKOverlayTransitionContext', 'SKPayment', 'SKPaymentDiscount', 'SKPaymentQueue', 'SKPaymentTransaction', 'SKPhysicsBody', 'SKPhysicsContact', 'SKPhysicsJoint', 'SKPhysicsJointFixed', 'SKPhysicsJointLimit', 'SKPhysicsJointPin', 'SKPhysicsJointSliding', 'SKPhysicsJointSpring', 'SKPhysicsWorld', 'SKProduct', 'SKProductDiscount', 'SKProductStorePromotionController', 'SKProductSubscriptionPeriod', 'SKProductsRequest', 'SKProductsResponse', 'SKRange', 'SKReachConstraints', 'SKReceiptRefreshRequest', 'SKReferenceNode', 'SKRegion', 'SKRenderer', 'SKRequest', 'SKScene', 'SKShader', 'SKShapeNode', 'SKSpriteNode', 'SKStoreProductViewController', 'SKStoreReviewController', 'SKStorefront', 'SKTexture', 'SKTextureAtlas', 'SKTileDefinition', 'SKTileGroup', 'SKTileGroupRule', 'SKTileMapNode', 'SKTileSet', 'SKTransformNode', 'SKTransition', 'SKUniform', 'SKVideoNode', 'SKView', 'SKWarpGeometry', 'SKWarpGeometryGrid', 'SLComposeServiceViewController', 'SLComposeSheetConfigurationItem', 'SLComposeViewController', 'SLRequest', 'SNAudioFileAnalyzer', 'SNAudioStreamAnalyzer', 'SNClassification', 'SNClassificationResult', 'SNClassifySoundRequest', 'SRAmbientLightSample', 'SRApplicationUsage', 'SRDeletionRecord', 'SRDevice', 'SRDeviceUsageReport', 'SRFetchRequest', 'SRFetchResult', 'SRKeyboardMetrics', 'SRKeyboardProbabilityMetric', 'SRMessagesUsageReport', 
'SRNotificationUsage', 'SRPhoneUsageReport', 'SRSensorReader', 'SRVisit', 'SRWebUsage', 'SRWristDetection', 'SSReadingList', 'STScreenTimeConfiguration', 'STScreenTimeConfigurationObserver', 'STWebHistory', 'STWebpageController', 'TKBERTLVRecord', 'TKCompactTLVRecord', 'TKSimpleTLVRecord', 'TKSmartCard', 'TKSmartCardATR', 'TKSmartCardATRInterfaceGroup', 'TKSmartCardPINFormat', 'TKSmartCardSlot', 'TKSmartCardSlotManager', 'TKSmartCardToken', 'TKSmartCardTokenDriver', 'TKSmartCardTokenSession', 'TKSmartCardUserInteraction', 'TKSmartCardUserInteractionForPINOperation', 'TKSmartCardUserInteractionForSecurePINChange', 'TKSmartCardUserInteractionForSecurePINVerification', 'TKTLVRecord', 'TKToken', 'TKTokenAuthOperation', 'TKTokenConfiguration', 'TKTokenDriver', 'TKTokenDriverConfiguration', 'TKTokenKeyAlgorithm', 'TKTokenKeyExchangeParameters', 'TKTokenKeychainCertificate', 'TKTokenKeychainContents', 'TKTokenKeychainItem', 'TKTokenKeychainKey', 'TKTokenPasswordAuthOperation', 'TKTokenSession', 'TKTokenSmartCardPINAuthOperation', 'TKTokenWatcher', 'TWRequest', 'TWTweetComposeViewController', 'UIAcceleration', 'UIAccelerometer', 'UIAccessibilityCustomAction', 'UIAccessibilityCustomRotor', 'UIAccessibilityCustomRotorItemResult', 'UIAccessibilityCustomRotorSearchPredicate', 'UIAccessibilityElement', 'UIAccessibilityLocationDescriptor', 'UIAction', 'UIActionSheet', 'UIActivity', 'UIActivityIndicatorView', 'UIActivityItemProvider', 'UIActivityItemsConfiguration', 'UIActivityViewController', 'UIAlertAction', 'UIAlertController', 'UIAlertView', 'UIApplication', 'UIApplicationShortcutIcon', 'UIApplicationShortcutItem', 'UIAttachmentBehavior', 'UIBackgroundConfiguration', 'UIBarAppearance', 'UIBarButtonItem', 'UIBarButtonItemAppearance', 'UIBarButtonItemGroup', 'UIBarButtonItemStateAppearance', 'UIBarItem', 'UIBezierPath', 'UIBlurEffect', 'UIButton', 'UICellAccessory', 'UICellAccessoryCheckmark', 'UICellAccessoryCustomView', 'UICellAccessoryDelete', 'UICellAccessoryDisclosureIndicator', 'UICellAccessoryInsert', 'UICellAccessoryLabel', 'UICellAccessoryMultiselect', 'UICellAccessoryOutlineDisclosure', 'UICellAccessoryReorder', 'UICellConfigurationState', 'UICloudSharingController', 'UICollectionLayoutListConfiguration', 'UICollectionReusableView', 'UICollectionView', 'UICollectionViewCell', 'UICollectionViewCellRegistration', 'UICollectionViewCompositionalLayout', 'UICollectionViewCompositionalLayoutConfiguration', 'UICollectionViewController', 'UICollectionViewDiffableDataSource', 'UICollectionViewDiffableDataSourceReorderingHandlers', 'UICollectionViewDiffableDataSourceSectionSnapshotHandlers', 'UICollectionViewDropPlaceholder', 'UICollectionViewDropProposal', 'UICollectionViewFlowLayout', 'UICollectionViewFlowLayoutInvalidationContext', 'UICollectionViewFocusUpdateContext', 'UICollectionViewLayout', 'UICollectionViewLayoutAttributes', 'UICollectionViewLayoutInvalidationContext', 'UICollectionViewListCell', 'UICollectionViewPlaceholder', 'UICollectionViewSupplementaryRegistration', 'UICollectionViewTransitionLayout', 'UICollectionViewUpdateItem', 'UICollisionBehavior', 'UIColor', 'UIColorPickerViewController', 'UIColorWell', 'UICommand', 'UICommandAlternate', 'UIContextMenuConfiguration', 'UIContextMenuInteraction', 'UIContextualAction', 'UIControl', 'UICubicTimingParameters', 'UIDatePicker', 'UIDeferredMenuElement', 'UIDevice', 'UIDictationPhrase', 'UIDocument', 'UIDocumentBrowserAction', 'UIDocumentBrowserTransitionController', 'UIDocumentBrowserViewController', 'UIDocumentInteractionController', 
'UIDocumentMenuViewController', 'UIDocumentPickerExtensionViewController', 'UIDocumentPickerViewController', 'UIDragInteraction', 'UIDragItem', 'UIDragPreview', 'UIDragPreviewParameters', 'UIDragPreviewTarget', 'UIDropInteraction', 'UIDropProposal', 'UIDynamicAnimator', 'UIDynamicBehavior', 'UIDynamicItemBehavior', 'UIDynamicItemGroup', 'UIEvent', 'UIFeedbackGenerator', 'UIFieldBehavior', 'UIFocusAnimationCoordinator', 'UIFocusDebugger', 'UIFocusGuide', 'UIFocusMovementHint', 'UIFocusSystem', 'UIFocusUpdateContext', 'UIFont', 'UIFontDescriptor', 'UIFontMetrics', 'UIFontPickerViewController', 'UIFontPickerViewControllerConfiguration', 'UIGestureRecognizer', 'UIGraphicsImageRenderer', 'UIGraphicsImageRendererContext', 'UIGraphicsImageRendererFormat', 'UIGraphicsPDFRenderer', 'UIGraphicsPDFRendererContext', 'UIGraphicsPDFRendererFormat', 'UIGraphicsRenderer', 'UIGraphicsRendererContext', 'UIGraphicsRendererFormat', 'UIGravityBehavior', 'UIHoverGestureRecognizer', 'UIImage', 'UIImageAsset', 'UIImageConfiguration', 'UIImagePickerController', 'UIImageSymbolConfiguration', 'UIImageView', 'UIImpactFeedbackGenerator', 'UIIndirectScribbleInteraction', 'UIInputView', 'UIInputViewController', 'UIInterpolatingMotionEffect', 'UIKey', 'UIKeyCommand', 'UILabel', 'UILargeContentViewerInteraction', 'UILayoutGuide', 'UILexicon', 'UILexiconEntry', 'UIListContentConfiguration', 'UIListContentImageProperties', 'UIListContentTextProperties', 'UIListContentView', 'UILocalNotification', 'UILocalizedIndexedCollation', 'UILongPressGestureRecognizer', 'UIManagedDocument', 'UIMarkupTextPrintFormatter', 'UIMenu', 'UIMenuController', 'UIMenuElement', 'UIMenuItem', 'UIMenuSystem', 'UIMotionEffect', 'UIMotionEffectGroup', 'UIMutableApplicationShortcutItem', 'UIMutableUserNotificationAction', 'UIMutableUserNotificationCategory', 'UINavigationBar', 'UINavigationBarAppearance', 'UINavigationController', 'UINavigationItem', 'UINib', 'UINotificationFeedbackGenerator', 'UIOpenURLContext', 'UIPageControl', 'UIPageViewController', 'UIPanGestureRecognizer', 'UIPasteConfiguration', 'UIPasteboard', 'UIPencilInteraction', 'UIPercentDrivenInteractiveTransition', 'UIPickerView', 'UIPinchGestureRecognizer', 'UIPointerEffect', 'UIPointerHighlightEffect', 'UIPointerHoverEffect', 'UIPointerInteraction', 'UIPointerLiftEffect', 'UIPointerLockState', 'UIPointerRegion', 'UIPointerRegionRequest', 'UIPointerShape', 'UIPointerStyle', 'UIPopoverBackgroundView', 'UIPopoverController', 'UIPopoverPresentationController', 'UIPresentationController', 'UIPress', 'UIPressesEvent', 'UIPreviewAction', 'UIPreviewActionGroup', 'UIPreviewInteraction', 'UIPreviewParameters', 'UIPreviewTarget', 'UIPrintFormatter', 'UIPrintInfo', 'UIPrintInteractionController', 'UIPrintPageRenderer', 'UIPrintPaper', 'UIPrinter', 'UIPrinterPickerController', 'UIProgressView', 'UIPushBehavior', 'UIReferenceLibraryViewController', 'UIRefreshControl', 'UIRegion', 'UIResponder', 'UIRotationGestureRecognizer', 'UIScene', 'UISceneActivationConditions', 'UISceneActivationRequestOptions', 'UISceneConfiguration', 'UISceneConnectionOptions', 'UISceneDestructionRequestOptions', 'UISceneOpenExternalURLOptions', 'UISceneOpenURLOptions', 'UISceneSession', 'UISceneSizeRestrictions', 'UIScreen', 'UIScreenEdgePanGestureRecognizer', 'UIScreenMode', 'UIScreenshotService', 'UIScribbleInteraction', 'UIScrollView', 'UISearchBar', 'UISearchContainerViewController', 'UISearchController', 'UISearchDisplayController', 'UISearchSuggestionItem', 'UISearchTextField', 'UISearchToken', 'UISegmentedControl', 
'UISelectionFeedbackGenerator', 'UISimpleTextPrintFormatter', 'UISlider', 'UISnapBehavior', 'UISplitViewController', 'UISpringLoadedInteraction', 'UISpringTimingParameters', 'UIStackView', 'UIStatusBarManager', 'UIStepper', 'UIStoryboard', 'UIStoryboardPopoverSegue', 'UIStoryboardSegue', 'UIStoryboardUnwindSegueSource', 'UISwipeActionsConfiguration', 'UISwipeGestureRecognizer', 'UISwitch', 'UITabBar', 'UITabBarAppearance', 'UITabBarController', 'UITabBarItem', 'UITabBarItemAppearance', 'UITabBarItemStateAppearance', 'UITableView', 'UITableViewCell', 'UITableViewController', 'UITableViewDiffableDataSource', 'UITableViewDropPlaceholder', 'UITableViewDropProposal', 'UITableViewFocusUpdateContext', 'UITableViewHeaderFooterView', 'UITableViewPlaceholder', 'UITableViewRowAction', 'UITapGestureRecognizer', 'UITargetedDragPreview', 'UITargetedPreview', 'UITextChecker', 'UITextDragPreviewRenderer', 'UITextDropProposal', 'UITextField', 'UITextFormattingCoordinator', 'UITextInputAssistantItem', 'UITextInputMode', 'UITextInputPasswordRules', 'UITextInputStringTokenizer', 'UITextInteraction', 'UITextPlaceholder', 'UITextPosition', 'UITextRange', 'UITextSelectionRect', 'UITextView', 'UITitlebar', 'UIToolbar', 'UIToolbarAppearance', 'UITouch', 'UITraitCollection', 'UIUserNotificationAction', 'UIUserNotificationCategory', 'UIUserNotificationSettings', 'UIVibrancyEffect', 'UIVideoEditorController', 'UIView', 'UIViewConfigurationState', 'UIViewController', 'UIViewPrintFormatter', 'UIViewPropertyAnimator', 'UIVisualEffect', 'UIVisualEffectView', 'UIWebView', 'UIWindow', 'UIWindowScene', 'UIWindowSceneDestructionRequestOptions', 'UNCalendarNotificationTrigger', 'UNLocationNotificationTrigger', 'UNMutableNotificationContent', 'UNNotification', 'UNNotificationAction', 'UNNotificationAttachment', 'UNNotificationCategory', 'UNNotificationContent', 'UNNotificationRequest', 'UNNotificationResponse', 'UNNotificationServiceExtension', 'UNNotificationSettings', 'UNNotificationSound', 'UNNotificationTrigger', 'UNPushNotificationTrigger', 'UNTextInputNotificationAction', 'UNTextInputNotificationResponse', 'UNTimeIntervalNotificationTrigger', 'UNUserNotificationCenter', 'UTType', 'VNBarcodeObservation', 'VNCircle', 'VNClassificationObservation', 'VNClassifyImageRequest', 'VNContour', 'VNContoursObservation', 'VNCoreMLFeatureValueObservation', 'VNCoreMLModel', 'VNCoreMLRequest', 'VNDetectBarcodesRequest', 'VNDetectContoursRequest', 'VNDetectFaceCaptureQualityRequest', 'VNDetectFaceLandmarksRequest', 'VNDetectFaceRectanglesRequest', 'VNDetectHorizonRequest', 'VNDetectHumanBodyPoseRequest', 'VNDetectHumanHandPoseRequest', 'VNDetectHumanRectanglesRequest', 'VNDetectRectanglesRequest', 'VNDetectTextRectanglesRequest', 'VNDetectTrajectoriesRequest', 'VNDetectedObjectObservation', 'VNDetectedPoint', 'VNDocumentCameraScan', 'VNDocumentCameraViewController', 'VNFaceLandmarkRegion', 'VNFaceLandmarkRegion2D', 'VNFaceLandmarks', 'VNFaceLandmarks2D', 'VNFaceObservation', 'VNFeaturePrintObservation', 'VNGenerateAttentionBasedSaliencyImageRequest', 'VNGenerateImageFeaturePrintRequest', 'VNGenerateObjectnessBasedSaliencyImageRequest', 'VNGenerateOpticalFlowRequest', 'VNGeometryUtils', 'VNHomographicImageRegistrationRequest', 'VNHorizonObservation', 'VNHumanBodyPoseObservation', 'VNHumanHandPoseObservation', 'VNImageAlignmentObservation', 'VNImageBasedRequest', 'VNImageHomographicAlignmentObservation', 'VNImageRegistrationRequest', 'VNImageRequestHandler', 'VNImageTranslationAlignmentObservation', 'VNObservation', 
'VNPixelBufferObservation', 'VNPoint', 'VNRecognizeAnimalsRequest', 'VNRecognizeTextRequest', 'VNRecognizedObjectObservation', 'VNRecognizedPoint', 'VNRecognizedPointsObservation', 'VNRecognizedText', 'VNRecognizedTextObservation', 'VNRectangleObservation', 'VNRequest', 'VNSaliencyImageObservation', 'VNSequenceRequestHandler', 'VNStatefulRequest', 'VNTargetedImageRequest', 'VNTextObservation', 'VNTrackObjectRequest', 'VNTrackRectangleRequest', 'VNTrackingRequest', 'VNTrajectoryObservation', 'VNTranslationalImageRegistrationRequest', 'VNVector', 'VNVideoProcessor', 'VNVideoProcessorCadence', 'VNVideoProcessorFrameRateCadence', 'VNVideoProcessorRequestProcessingOptions', 'VNVideoProcessorTimeIntervalCadence', 'VSAccountApplicationProvider', 'VSAccountManager', 'VSAccountManagerResult', 'VSAccountMetadata', 'VSAccountMetadataRequest', 'VSAccountProviderResponse', 'VSSubscription', 'VSSubscriptionRegistrationCenter', 'WCSession', 'WCSessionFile', 'WCSessionFileTransfer', 'WCSessionUserInfoTransfer', 'WKBackForwardList', 'WKBackForwardListItem', 'WKContentRuleList', 'WKContentRuleListStore', 'WKContentWorld', 'WKContextMenuElementInfo', 'WKFindConfiguration', 'WKFindResult', 'WKFrameInfo', 'WKHTTPCookieStore', 'WKNavigation', 'WKNavigationAction', 'WKNavigationResponse', 'WKOpenPanelParameters', 'WKPDFConfiguration', 'WKPreferences', 'WKPreviewElementInfo', 'WKProcessPool', 'WKScriptMessage', 'WKSecurityOrigin', 'WKSnapshotConfiguration', 'WKUserContentController', 'WKUserScript', 'WKWebView', 'WKWebViewConfiguration', 'WKWebpagePreferences', 'WKWebsiteDataRecord', 'WKWebsiteDataStore', 'WKWindowFeatures', '__EntityAccessibilityWrapper'}
+COCOA_PROTOCOLS = {'ABNewPersonViewControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'ABPersonViewControllerDelegate', 'ABUnknownPersonViewControllerDelegate', 'ADActionViewControllerChildInterface', 'ADActionViewControllerInterface', 'ADBannerViewDelegate', 'ADInterstitialAdDelegate', 'AEAssessmentSessionDelegate', 'ARAnchorCopying', 'ARCoachingOverlayViewDelegate', 'ARSCNViewDelegate', 'ARSKViewDelegate', 'ARSessionDelegate', 'ARSessionObserver', 'ARSessionProviding', 'ARTrackable', 'ASAccountAuthenticationModificationControllerDelegate', 'ASAccountAuthenticationModificationControllerPresentationContextProviding', 'ASAuthorizationControllerDelegate', 'ASAuthorizationControllerPresentationContextProviding', 'ASAuthorizationCredential', 'ASAuthorizationProvider', 'ASAuthorizationProviderExtensionAuthorizationRequestHandler', 'ASWebAuthenticationPresentationContextProviding', 'ASWebAuthenticationSessionRequestDelegate', 'ASWebAuthenticationSessionWebBrowserSessionHandling', 'AUAudioUnitFactory', 'AVAssetDownloadDelegate', 'AVAssetResourceLoaderDelegate', 'AVAssetWriterDelegate', 'AVAsynchronousKeyValueLoading', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'AVCaptureDataOutputSynchronizerDelegate', 'AVCaptureDepthDataOutputDelegate', 'AVCaptureFileOutputDelegate', 'AVCaptureFileOutputRecordingDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'AVCapturePhotoCaptureDelegate', 'AVCapturePhotoFileDataRepresentationCustomizer', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'AVContentKeyRecipient', 'AVContentKeySessionDelegate', 'AVFragmentMinding', 'AVPictureInPictureControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'AVPlayerItemMetadataCollectorPushDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'AVPlayerItemOutputPullDelegate', 'AVPlayerItemOutputPushDelegate', 'AVPlayerViewControllerDelegate', 'AVQueuedSampleBufferRendering', 'AVRoutePickerViewDelegate', 'AVVideoCompositing', 'AVVideoCompositionInstruction', 'AVVideoCompositionValidationHandling', 'AXCustomContentProvider', 'CAAction', 'CAAnimationDelegate', 'CALayerDelegate', 'CAMediaTiming', 'CAMetalDrawable', 'CBCentralManagerDelegate', 'CBPeripheralDelegate', 'CBPeripheralManagerDelegate', 'CHHapticAdvancedPatternPlayer', 'CHHapticDeviceCapability', 'CHHapticParameterAttributes', 'CHHapticPatternPlayer', 'CIAccordionFoldTransition', 'CIAffineClamp', 'CIAffineTile', 'CIAreaAverage', 'CIAreaHistogram', 'CIAreaMaximum', 'CIAreaMaximumAlpha', 'CIAreaMinMax', 'CIAreaMinMaxRed', 'CIAreaMinimum', 'CIAreaMinimumAlpha', 'CIAreaReductionFilter', 'CIAttributedTextImageGenerator', 'CIAztecCodeGenerator', 'CIBarcodeGenerator', 'CIBarsSwipeTransition', 'CIBicubicScaleTransform', 'CIBlendWithMask', 'CIBloom', 'CIBokehBlur', 'CIBoxBlur', 'CIBumpDistortion', 'CIBumpDistortionLinear', 'CICMYKHalftone', 'CICheckerboardGenerator', 'CICircleSplashDistortion', 'CICircularScreen', 'CICircularWrap', 'CICode128BarcodeGenerator', 'CIColorAbsoluteDifference', 'CIColorClamp', 'CIColorControls', 'CIColorCrossPolynomial', 'CIColorCube', 'CIColorCubeWithColorSpace', 'CIColorCubesMixedWithMask', 'CIColorCurves', 'CIColorInvert', 'CIColorMap', 'CIColorMatrix', 'CIColorMonochrome', 'CIColorPolynomial', 'CIColorPosterize', 'CIColorThreshold', 'CIColorThresholdOtsu', 'CIColumnAverage', 'CIComicEffect', 'CICompositeOperation', 'CIConvolution', 'CICopyMachineTransition', 'CICoreMLModel', 'CICrystallize', 'CIDepthOfField', 'CIDepthToDisparity', 'CIDiscBlur', 'CIDisintegrateWithMaskTransition', 'CIDisparityToDepth', 
'CIDisplacementDistortion', 'CIDissolveTransition', 'CIDither', 'CIDocumentEnhancer', 'CIDotScreen', 'CIDroste', 'CIEdgePreserveUpsample', 'CIEdgeWork', 'CIEdges', 'CIEightfoldReflectedTile', 'CIExposureAdjust', 'CIFalseColor', 'CIFilter', 'CIFilterConstructor', 'CIFlashTransition', 'CIFourCoordinateGeometryFilter', 'CIFourfoldReflectedTile', 'CIFourfoldRotatedTile', 'CIFourfoldTranslatedTile', 'CIGaborGradients', 'CIGammaAdjust', 'CIGaussianBlur', 'CIGaussianGradient', 'CIGlassDistortion', 'CIGlassLozenge', 'CIGlideReflectedTile', 'CIGloom', 'CIHatchedScreen', 'CIHeightFieldFromMask', 'CIHexagonalPixellate', 'CIHighlightShadowAdjust', 'CIHistogramDisplay', 'CIHoleDistortion', 'CIHueAdjust', 'CIHueSaturationValueGradient', 'CIImageProcessorInput', 'CIImageProcessorOutput', 'CIKMeans', 'CIKaleidoscope', 'CIKeystoneCorrectionCombined', 'CIKeystoneCorrectionHorizontal', 'CIKeystoneCorrectionVertical', 'CILabDeltaE', 'CILanczosScaleTransform', 'CILenticularHaloGenerator', 'CILightTunnel', 'CILineOverlay', 'CILineScreen', 'CILinearGradient', 'CILinearToSRGBToneCurve', 'CIMaskToAlpha', 'CIMaskedVariableBlur', 'CIMaximumComponent', 'CIMedian', 'CIMeshGenerator', 'CIMinimumComponent', 'CIMix', 'CIModTransition', 'CIMorphologyGradient', 'CIMorphologyMaximum', 'CIMorphologyMinimum', 'CIMorphologyRectangleMaximum', 'CIMorphologyRectangleMinimum', 'CIMotionBlur', 'CINinePartStretched', 'CINinePartTiled', 'CINoiseReduction', 'CIOpTile', 'CIPDF417BarcodeGenerator', 'CIPageCurlTransition', 'CIPageCurlWithShadowTransition', 'CIPaletteCentroid', 'CIPalettize', 'CIParallelogramTile', 'CIPerspectiveCorrection', 'CIPerspectiveRotate', 'CIPerspectiveTile', 'CIPerspectiveTransform', 'CIPerspectiveTransformWithExtent', 'CIPhotoEffect', 'CIPinchDistortion', 'CIPixellate', 'CIPlugInRegistration', 'CIPointillize', 'CIQRCodeGenerator', 'CIRadialGradient', 'CIRandomGenerator', 'CIRippleTransition', 'CIRoundedRectangleGenerator', 'CIRowAverage', 'CISRGBToneCurveToLinear', 'CISaliencyMap', 'CISepiaTone', 'CIShadedMaterial', 'CISharpenLuminance', 'CISixfoldReflectedTile', 'CISixfoldRotatedTile', 'CISmoothLinearGradient', 'CISpotColor', 'CISpotLight', 'CIStarShineGenerator', 'CIStraighten', 'CIStretchCrop', 'CIStripesGenerator', 'CISunbeamsGenerator', 'CISwipeTransition', 'CITemperatureAndTint', 'CITextImageGenerator', 'CIThermal', 'CIToneCurve', 'CITorusLensDistortion', 'CITransitionFilter', 'CITriangleKaleidoscope', 'CITriangleTile', 'CITwelvefoldReflectedTile', 'CITwirlDistortion', 'CIUnsharpMask', 'CIVibrance', 'CIVignette', 'CIVignetteEffect', 'CIVortexDistortion', 'CIWhitePointAdjust', 'CIXRay', 'CIZoomBlur', 'CKRecordKeyValueSetting', 'CKRecordValue', 'CLKComplicationDataSource', 'CLLocationManagerDelegate', 'CLSContextProvider', 'CLSDataStoreDelegate', 'CMFallDetectionDelegate', 'CMHeadphoneMotionManagerDelegate', 'CNChangeHistoryEventVisitor', 'CNContactPickerDelegate', 'CNContactViewControllerDelegate', 'CNKeyDescriptor', 'CPApplicationDelegate', 'CPBarButtonProviding', 'CPInterfaceControllerDelegate', 'CPListTemplateDelegate', 'CPListTemplateItem', 'CPMapTemplateDelegate', 'CPNowPlayingTemplateObserver', 'CPPointOfInterestTemplateDelegate', 'CPSearchTemplateDelegate', 'CPSelectableListItem', 'CPSessionConfigurationDelegate', 'CPTabBarTemplateDelegate', 'CPTemplateApplicationDashboardSceneDelegate', 'CPTemplateApplicationSceneDelegate', 'CSSearchableIndexDelegate', 'CTSubscriberDelegate', 'CTTelephonyNetworkInfoDelegate', 'CXCallDirectoryExtensionContextDelegate', 'CXCallObserverDelegate', 'CXProviderDelegate', 
'EAAccessoryDelegate', 'EAGLDrawable', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'EKCalendarChooserDelegate', 'EKEventEditViewDelegate', 'EKEventViewDelegate', 'GCDevice', 'GKAchievementViewControllerDelegate', 'GKAgentDelegate', 'GKChallengeEventHandlerDelegate', 'GKChallengeListener', 'GKFriendRequestComposeViewControllerDelegate', 'GKGameCenterControllerDelegate', 'GKGameModel', 'GKGameModelPlayer', 'GKGameModelUpdate', 'GKGameSessionEventListener', 'GKGameSessionSharingViewControllerDelegate', 'GKInviteEventListener', 'GKLeaderboardViewControllerDelegate', 'GKLocalPlayerListener', 'GKMatchDelegate', 'GKMatchmakerViewControllerDelegate', 'GKPeerPickerControllerDelegate', 'GKRandom', 'GKSavedGameListener', 'GKSceneRootNodeType', 'GKSessionDelegate', 'GKStrategist', 'GKTurnBasedEventListener', 'GKTurnBasedMatchmakerViewControllerDelegate', 'GKVoiceChatClient', 'GLKNamedEffect', 'GLKViewControllerDelegate', 'GLKViewDelegate', 'HKLiveWorkoutBuilderDelegate', 'HKWorkoutSessionDelegate', 'HMAccessoryBrowserDelegate', 'HMAccessoryDelegate', 'HMCameraSnapshotControlDelegate', 'HMCameraStreamControlDelegate', 'HMHomeDelegate', 'HMHomeManagerDelegate', 'HMNetworkConfigurationProfileDelegate', 'ICCameraDeviceDelegate', 'ICCameraDeviceDownloadDelegate', 'ICDeviceBrowserDelegate', 'ICDeviceDelegate', 'ICScannerDeviceDelegate', 'ILMessageFilterQueryHandling', 'INActivateCarSignalIntentHandling', 'INAddMediaIntentHandling', 'INAddTasksIntentHandling', 'INAppendToNoteIntentHandling', 'INBookRestaurantReservationIntentHandling', 'INCallsDomainHandling', 'INCancelRideIntentHandling', 'INCancelWorkoutIntentHandling', 'INCarCommandsDomainHandling', 'INCarPlayDomainHandling', 'INCreateNoteIntentHandling', 'INCreateTaskListIntentHandling', 'INDeleteTasksIntentHandling', 'INEndWorkoutIntentHandling', 'INGetAvailableRestaurantReservationBookingDefaultsIntentHandling', 'INGetAvailableRestaurantReservationBookingsIntentHandling', 'INGetCarLockStatusIntentHandling', 'INGetCarPowerLevelStatusIntentHandling', 'INGetCarPowerLevelStatusIntentResponseObserver', 'INGetRestaurantGuestIntentHandling', 'INGetRideStatusIntentHandling', 'INGetRideStatusIntentResponseObserver', 'INGetUserCurrentRestaurantReservationBookingsIntentHandling', 'INGetVisualCodeIntentHandling', 'INIntentHandlerProviding', 'INListCarsIntentHandling', 'INListRideOptionsIntentHandling', 'INMessagesDomainHandling', 'INNotebookDomainHandling', 'INPauseWorkoutIntentHandling', 'INPayBillIntentHandling', 'INPaymentsDomainHandling', 'INPhotosDomainHandling', 'INPlayMediaIntentHandling', 'INRadioDomainHandling', 'INRequestPaymentIntentHandling', 'INRequestRideIntentHandling', 'INResumeWorkoutIntentHandling', 'INRidesharingDomainHandling', 'INSaveProfileInCarIntentHandling', 'INSearchCallHistoryIntentHandling', 'INSearchForAccountsIntentHandling', 'INSearchForBillsIntentHandling', 'INSearchForMediaIntentHandling', 'INSearchForMessagesIntentHandling', 'INSearchForNotebookItemsIntentHandling', 'INSearchForPhotosIntentHandling', 'INSendMessageIntentHandling', 'INSendPaymentIntentHandling', 'INSendRideFeedbackIntentHandling', 'INSetAudioSourceInCarIntentHandling', 'INSetCarLockStatusIntentHandling', 'INSetClimateSettingsInCarIntentHandling', 'INSetDefrosterSettingsInCarIntentHandling', 'INSetMessageAttributeIntentHandling', 'INSetProfileInCarIntentHandling', 'INSetRadioStationIntentHandling', 'INSetSeatSettingsInCarIntentHandling', 'INSetTaskAttributeIntentHandling', 'INSnoozeTasksIntentHandling', 'INSpeakable', 'INStartAudioCallIntentHandling', 
'INStartCallIntentHandling', 'INStartPhotoPlaybackIntentHandling', 'INStartVideoCallIntentHandling', 'INStartWorkoutIntentHandling', 'INTransferMoneyIntentHandling', 'INUIAddVoiceShortcutButtonDelegate', 'INUIAddVoiceShortcutViewControllerDelegate', 'INUIEditVoiceShortcutViewControllerDelegate', 'INUIHostedViewControlling', 'INUIHostedViewSiriProviding', 'INUpdateMediaAffinityIntentHandling', 'INVisualCodeDomainHandling', 'INWorkoutsDomainHandling', 'JSExport', 'MCAdvertiserAssistantDelegate', 'MCBrowserViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MCNearbyServiceBrowserDelegate', 'MCSessionDelegate', 'MDLAssetResolver', 'MDLComponent', 'MDLJointAnimation', 'MDLLightProbeIrradianceDataSource', 'MDLMeshBuffer', 'MDLMeshBufferAllocator', 'MDLMeshBufferZone', 'MDLNamed', 'MDLObjectContainerComponent', 'MDLTransformComponent', 'MDLTransformOp', 'MFMailComposeViewControllerDelegate', 'MFMessageComposeViewControllerDelegate', 'MIDICIProfileResponderDelegate', 'MKAnnotation', 'MKGeoJSONObject', 'MKLocalSearchCompleterDelegate', 'MKMapViewDelegate', 'MKOverlay', 'MKReverseGeocoderDelegate', 'MLBatchProvider', 'MLCustomLayer', 'MLCustomModel', 'MLFeatureProvider', 'MLWritable', 'MPMediaPickerControllerDelegate', 'MPMediaPlayback', 'MPNowPlayingSessionDelegate', 'MPPlayableContentDataSource', 'MPPlayableContentDelegate', 'MPSystemMusicPlayerController', 'MSAuthenticationPresentationContext', 'MSMessagesAppTranscriptPresentation', 'MSStickerBrowserViewDataSource', 'MTKViewDelegate', 'MTLAccelerationStructure', 'MTLAccelerationStructureCommandEncoder', 'MTLArgumentEncoder', 'MTLBinaryArchive', 'MTLBlitCommandEncoder', 'MTLBuffer', 'MTLCaptureScope', 'MTLCommandBuffer', 'MTLCommandBufferEncoderInfo', 'MTLCommandEncoder', 'MTLCommandQueue', 'MTLComputeCommandEncoder', 'MTLComputePipelineState', 'MTLCounter', 'MTLCounterSampleBuffer', 'MTLCounterSet', 'MTLDepthStencilState', 'MTLDevice', 'MTLDrawable', 'MTLDynamicLibrary', 'MTLEvent', 'MTLFence', 'MTLFunction', 'MTLFunctionHandle', 'MTLFunctionLog', 'MTLFunctionLogDebugLocation', 'MTLHeap', 'MTLIndirectCommandBuffer', 'MTLIndirectComputeCommand', 'MTLIndirectComputeCommandEncoder', 'MTLIndirectRenderCommand', 'MTLIndirectRenderCommandEncoder', 'MTLIntersectionFunctionTable', 'MTLLibrary', 'MTLLogContainer', 'MTLParallelRenderCommandEncoder', 'MTLRasterizationRateMap', 'MTLRenderCommandEncoder', 'MTLRenderPipelineState', 'MTLResource', 'MTLResourceStateCommandEncoder', 'MTLSamplerState', 'MTLSharedEvent', 'MTLTexture', 'MTLVisibleFunctionTable', 'MXMetricManagerSubscriber', 'MyClassJavaScriptMethods', 'NCWidgetProviding', 'NEAppPushDelegate', 'NFCFeliCaTag', 'NFCISO15693Tag', 'NFCISO7816Tag', 'NFCMiFareTag', 'NFCNDEFReaderSessionDelegate', 'NFCNDEFTag', 'NFCReaderSession', 'NFCReaderSessionDelegate', 'NFCTag', 'NFCTagReaderSessionDelegate', 'NFCVASReaderSessionDelegate', 'NISessionDelegate', 'NSCacheDelegate', 'NSCoding', 'NSCollectionLayoutContainer', 'NSCollectionLayoutEnvironment', 'NSCollectionLayoutVisibleItem', 'NSCopying', 'NSDecimalNumberBehaviors', 'NSDiscardableContent', 'NSExtensionRequestHandling', 'NSFastEnumeration', 'NSFetchRequestResult', 'NSFetchedResultsControllerDelegate', 'NSFetchedResultsSectionInfo', 'NSFileManagerDelegate', 'NSFilePresenter', 'NSFileProviderChangeObserver', 'NSFileProviderEnumerationObserver', 'NSFileProviderEnumerator', 'NSFileProviderItem', 'NSFileProviderServiceSource', 'NSItemProviderReading', 'NSItemProviderWriting', 'NSKeyedArchiverDelegate', 'NSKeyedUnarchiverDelegate', 
'NSLayoutManagerDelegate', 'NSLocking', 'NSMachPortDelegate', 'NSMetadataQueryDelegate', 'NSMutableCopying', 'NSNetServiceBrowserDelegate', 'NSNetServiceDelegate', 'NSPortDelegate', 'NSProgressReporting', 'NSSecureCoding', 'NSStreamDelegate', 'NSTextAttachmentContainer', 'NSTextLayoutOrientationProvider', 'NSTextStorageDelegate', 'NSURLAuthenticationChallengeSender', 'NSURLConnectionDataDelegate', 'NSURLConnectionDelegate', 'NSURLConnectionDownloadDelegate', 'NSURLProtocolClient', 'NSURLSessionDataDelegate', 'NSURLSessionDelegate', 'NSURLSessionDownloadDelegate', 'NSURLSessionStreamDelegate', 'NSURLSessionTaskDelegate', 'NSURLSessionWebSocketDelegate', 'NSUserActivityDelegate', 'NSXMLParserDelegate', 'NSXPCListenerDelegate', 'NSXPCProxyCreating', 'NWTCPConnectionAuthenticationDelegate', 'OSLogEntryFromProcess', 'OSLogEntryWithPayload', 'PDFDocumentDelegate', 'PDFViewDelegate', 'PHContentEditingController', 'PHLivePhotoFrame', 'PHLivePhotoViewDelegate', 'PHPhotoLibraryAvailabilityObserver', 'PHPhotoLibraryChangeObserver', 'PHPickerViewControllerDelegate', 'PKAddPassesViewControllerDelegate', 'PKAddPaymentPassViewControllerDelegate', 'PKAddSecureElementPassViewControllerDelegate', 'PKCanvasViewDelegate', 'PKDisbursementAuthorizationControllerDelegate', 'PKIssuerProvisioningExtensionAuthorizationProviding', 'PKPaymentAuthorizationControllerDelegate', 'PKPaymentAuthorizationViewControllerDelegate', 'PKPaymentInformationRequestHandling', 'PKPushRegistryDelegate', 'PKToolPickerObserver', 'PreviewDisplaying', 'QLPreviewControllerDataSource', 'QLPreviewControllerDelegate', 'QLPreviewItem', 'QLPreviewingController', 'RPBroadcastActivityControllerDelegate', 'RPBroadcastActivityViewControllerDelegate', 'RPBroadcastControllerDelegate', 'RPPreviewViewControllerDelegate', 'RPScreenRecorderDelegate', 'SCNActionable', 'SCNAnimatable', 'SCNAnimation', 'SCNAvoidOccluderConstraintDelegate', 'SCNBoundingVolume', 'SCNBufferStream', 'SCNCameraControlConfiguration', 'SCNCameraControllerDelegate', 'SCNNodeRendererDelegate', 'SCNPhysicsContactDelegate', 'SCNProgramDelegate', 'SCNSceneExportDelegate', 'SCNSceneRenderer', 'SCNSceneRendererDelegate', 'SCNShadable', 'SCNTechniqueSupport', 'SFSafariViewControllerDelegate', 'SFSpeechRecognitionTaskDelegate', 'SFSpeechRecognizerDelegate', 'SKCloudServiceSetupViewControllerDelegate', 'SKOverlayDelegate', 'SKPaymentQueueDelegate', 'SKPaymentTransactionObserver', 'SKPhysicsContactDelegate', 'SKProductsRequestDelegate', 'SKRequestDelegate', 'SKSceneDelegate', 'SKStoreProductViewControllerDelegate', 'SKViewDelegate', 'SKWarpable', 'SNRequest', 'SNResult', 'SNResultsObserving', 'SRSensorReaderDelegate', 'TKSmartCardTokenDriverDelegate', 'TKSmartCardUserInteractionDelegate', 'TKTokenDelegate', 'TKTokenDriverDelegate', 'TKTokenSessionDelegate', 'UIAccelerometerDelegate', 'UIAccessibilityContainerDataTable', 'UIAccessibilityContainerDataTableCell', 'UIAccessibilityContentSizeCategoryImageAdjusting', 'UIAccessibilityIdentification', 'UIAccessibilityReadingContent', 'UIActionSheetDelegate', 'UIActivityItemSource', 'UIActivityItemsConfigurationReading', 'UIAdaptivePresentationControllerDelegate', 'UIAlertViewDelegate', 'UIAppearance', 'UIAppearanceContainer', 'UIApplicationDelegate', 'UIBarPositioning', 'UIBarPositioningDelegate', 'UICloudSharingControllerDelegate', 'UICollectionViewDataSource', 'UICollectionViewDataSourcePrefetching', 'UICollectionViewDelegate', 'UICollectionViewDelegateFlowLayout', 'UICollectionViewDragDelegate', 'UICollectionViewDropCoordinator', 
'UICollectionViewDropDelegate', 'UICollectionViewDropItem', 'UICollectionViewDropPlaceholderContext', 'UICollisionBehaviorDelegate', 'UIColorPickerViewControllerDelegate', 'UIConfigurationState', 'UIContentConfiguration', 'UIContentContainer', 'UIContentSizeCategoryAdjusting', 'UIContentView', 'UIContextMenuInteractionAnimating', 'UIContextMenuInteractionCommitAnimating', 'UIContextMenuInteractionDelegate', 'UICoordinateSpace', 'UIDataSourceModelAssociation', 'UIDataSourceTranslating', 'UIDocumentBrowserViewControllerDelegate', 'UIDocumentInteractionControllerDelegate', 'UIDocumentMenuDelegate', 'UIDocumentPickerDelegate', 'UIDragAnimating', 'UIDragDropSession', 'UIDragInteractionDelegate', 'UIDragSession', 'UIDropInteractionDelegate', 'UIDropSession', 'UIDynamicAnimatorDelegate', 'UIDynamicItem', 'UIFocusAnimationContext', 'UIFocusDebuggerOutput', 'UIFocusEnvironment', 'UIFocusItem', 'UIFocusItemContainer', 'UIFocusItemScrollableContainer', 'UIFontPickerViewControllerDelegate', 'UIGestureRecognizerDelegate', 'UIGuidedAccessRestrictionDelegate', 'UIImageConfiguration', 'UIImagePickerControllerDelegate', 'UIIndirectScribbleInteractionDelegate', 'UIInputViewAudioFeedback', 'UIInteraction', 'UIItemProviderPresentationSizeProviding', 'UIKeyInput', 'UILargeContentViewerInteractionDelegate', 'UILargeContentViewerItem', 'UILayoutSupport', 'UIMenuBuilder', 'UINavigationBarDelegate', 'UINavigationControllerDelegate', 'UIObjectRestoration', 'UIPageViewControllerDataSource', 'UIPageViewControllerDelegate', 'UIPasteConfigurationSupporting', 'UIPencilInteractionDelegate', 'UIPickerViewAccessibilityDelegate', 'UIPickerViewDataSource', 'UIPickerViewDelegate', 'UIPointerInteractionAnimating', 'UIPointerInteractionDelegate', 'UIPopoverBackgroundViewMethods', 'UIPopoverControllerDelegate', 'UIPopoverPresentationControllerDelegate', 'UIPreviewActionItem', 'UIPreviewInteractionDelegate', 'UIPrintInteractionControllerDelegate', 'UIPrinterPickerControllerDelegate', 'UIResponderStandardEditActions', 'UISceneDelegate', 'UIScreenshotServiceDelegate', 'UIScribbleInteractionDelegate', 'UIScrollViewAccessibilityDelegate', 'UIScrollViewDelegate', 'UISearchBarDelegate', 'UISearchControllerDelegate', 'UISearchDisplayDelegate', 'UISearchResultsUpdating', 'UISearchSuggestion', 'UISearchTextFieldDelegate', 'UISearchTextFieldPasteItem', 'UISplitViewControllerDelegate', 'UISpringLoadedInteractionBehavior', 'UISpringLoadedInteractionContext', 'UISpringLoadedInteractionEffect', 'UISpringLoadedInteractionSupporting', 'UIStateRestoring', 'UITabBarControllerDelegate', 'UITabBarDelegate', 'UITableViewDataSource', 'UITableViewDataSourcePrefetching', 'UITableViewDelegate', 'UITableViewDragDelegate', 'UITableViewDropCoordinator', 'UITableViewDropDelegate', 'UITableViewDropItem', 'UITableViewDropPlaceholderContext', 'UITextDocumentProxy', 'UITextDragDelegate', 'UITextDragRequest', 'UITextDraggable', 'UITextDropDelegate', 'UITextDropRequest', 'UITextDroppable', 'UITextFieldDelegate', 'UITextFormattingCoordinatorDelegate', 'UITextInput', 'UITextInputDelegate', 'UITextInputTokenizer', 'UITextInputTraits', 'UITextInteractionDelegate', 'UITextPasteConfigurationSupporting', 'UITextPasteDelegate', 'UITextPasteItem', 'UITextSelecting', 'UITextViewDelegate', 'UITimingCurveProvider', 'UIToolbarDelegate', 'UITraitEnvironment', 'UIUserActivityRestoring', 'UIVideoEditorControllerDelegate', 'UIViewAnimating', 'UIViewControllerAnimatedTransitioning', 'UIViewControllerContextTransitioning', 'UIViewControllerInteractiveTransitioning', 
'UIViewControllerPreviewing', 'UIViewControllerPreviewingDelegate', 'UIViewControllerRestoration', 'UIViewControllerTransitionCoordinator', 'UIViewControllerTransitionCoordinatorContext', 'UIViewControllerTransitioningDelegate', 'UIViewImplicitlyAnimating', 'UIWebViewDelegate', 'UIWindowSceneDelegate', 'UNNotificationContentExtension', 'UNUserNotificationCenterDelegate', 'VNDocumentCameraViewControllerDelegate', 'VNFaceObservationAccepting', 'VNRequestProgressProviding', 'VNRequestRevisionProviding', 'VSAccountManagerDelegate', 'WCSessionDelegate', 'WKHTTPCookieStoreObserver', 'WKNavigationDelegate', 'WKPreviewActionItem', 'WKScriptMessageHandler', 'WKScriptMessageHandlerWithReply', 'WKUIDelegate', 'WKURLSchemeHandler', 'WKURLSchemeTask'}
+COCOA_PRIMITIVES = {'ACErrorCode', 'ALCcontext_struct', 'ALCdevice_struct', 'ALMXGlyphEntry', 'ALMXHeader', 'API_UNAVAILABLE', 'AUChannelInfo', 'AUDependentParameter', 'AUDistanceAttenuationData', 'AUHostIdentifier', 'AUHostVersionIdentifier', 'AUInputSamplesInOutputCallbackStruct', 'AUMIDIEvent', 'AUMIDIOutputCallbackStruct', 'AUNodeInteraction', 'AUNodeRenderCallback', 'AUNumVersion', 'AUParameterAutomationEvent', 'AUParameterEvent', 'AUParameterMIDIMapping', 'AUPreset', 'AUPresetEvent', 'AURecordedParameterEvent', 'AURenderCallbackStruct', 'AURenderEventHeader', 'AUSamplerBankPresetData', 'AUSamplerInstrumentData', 'AnchorPoint', 'AnchorPointTable', 'AnkrTable', 'AudioBalanceFade', 'AudioBuffer', 'AudioBufferList', 'AudioBytePacketTranslation', 'AudioChannelDescription', 'AudioChannelLayout', 'AudioClassDescription', 'AudioCodecMagicCookieInfo', 'AudioCodecPrimeInfo', 'AudioComponentDescription', 'AudioComponentPlugInInterface', 'AudioConverterPrimeInfo', 'AudioFileMarker', 'AudioFileMarkerList', 'AudioFilePacketTableInfo', 'AudioFileRegion', 'AudioFileRegionList', 'AudioFileTypeAndFormatID', 'AudioFile_SMPTE_Time', 'AudioFormatInfo', 'AudioFormatListItem', 'AudioFramePacketTranslation', 'AudioIndependentPacketTranslation', 'AudioOutputUnitMIDICallbacks', 'AudioOutputUnitStartAtTimeParams', 'AudioPacketDependencyInfoTranslation', 'AudioPacketRangeByteCountTranslation', 'AudioPacketRollDistanceTranslation', 'AudioPanningInfo', 'AudioQueueBuffer', 'AudioQueueChannelAssignment', 'AudioQueueLevelMeterState', 'AudioQueueParameterEvent', 'AudioStreamBasicDescription', 'AudioStreamPacketDescription', 'AudioTimeStamp', 'AudioUnitCocoaViewInfo', 'AudioUnitConnection', 'AudioUnitExternalBuffer', 'AudioUnitFrequencyResponseBin', 'AudioUnitMIDIControlMapping', 'AudioUnitMeterClipping', 'AudioUnitNodeConnection', 'AudioUnitOtherPluginDesc', 'AudioUnitParameter', 'AudioUnitParameterEvent', 'AudioUnitParameterHistoryInfo', 'AudioUnitParameterInfo', 'AudioUnitParameterNameInfo', 'AudioUnitParameterStringFromValue', 'AudioUnitParameterValueFromString', 'AudioUnitParameterValueName', 'AudioUnitParameterValueTranslation', 'AudioUnitPresetMAS_SettingData', 'AudioUnitPresetMAS_Settings', 'AudioUnitProperty', 'AudioUnitRenderContext', 'AudioValueRange', 'AudioValueTranslation', 'AuthorizationOpaqueRef', 'BslnFormat0Part', 'BslnFormat1Part', 'BslnFormat2Part', 'BslnFormat3Part', 'BslnTable', 'CABarBeatTime', 'CAFAudioDescription', 'CAFChunkHeader', 'CAFDataChunk', 'CAFFileHeader', 'CAFInfoStrings', 'CAFInstrumentChunk', 'CAFMarker', 'CAFMarkerChunk', 'CAFOverviewChunk', 'CAFOverviewSample', 'CAFPacketTableHeader', 'CAFPeakChunk', 'CAFPositionPeak', 'CAFRegion', 'CAFRegionChunk', 'CAFStringID', 'CAFStrings', 'CAFUMIDChunk', 'CAF_SMPTE_Time', 'CAF_UUID_ChunkHeader', 'CA_BOXABLE', 'CFHostClientContext', 'CFNetServiceClientContext', 'CF_BRIDGED_MUTABLE_TYPE', 'CF_BRIDGED_TYPE', 'CF_RELATED_TYPE', 'CGAffineTransform', 'CGDataConsumerCallbacks', 'CGDataProviderDirectCallbacks', 'CGDataProviderSequentialCallbacks', 'CGFunctionCallbacks', 'CGPDFArray', 'CGPDFContentStream', 'CGPDFDictionary', 'CGPDFObject', 'CGPDFOperatorTable', 'CGPDFScanner', 'CGPDFStream', 'CGPDFString', 'CGPathElement', 'CGPatternCallbacks', 'CGVector', 'CG_BOXABLE', 'CLLocationCoordinate2D', 'CM_BRIDGED_TYPE', 'CTParagraphStyleSetting', 'CVPlanarComponentInfo', 'CVPlanarPixelBufferInfo', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'CVSMPTETime', 'CV_BRIDGED_TYPE', 'ComponentInstanceRecord', 
'ExtendedAudioFormatInfo', 'ExtendedControlEvent', 'ExtendedNoteOnEvent', 'ExtendedTempoEvent', 'FontVariation', 'GCQuaternion', 'GKBox', 'GKQuad', 'GKTriangle', 'GLKEffectPropertyPrv', 'HostCallbackInfo', 'IIO_BRIDGED_TYPE', 'IUnknownVTbl', 'JustDirectionTable', 'JustPCAction', 'JustPCActionSubrecord', 'JustPCConditionalAddAction', 'JustPCDecompositionAction', 'JustPCDuctilityAction', 'JustPCGlyphRepeatAddAction', 'JustPostcompTable', 'JustTable', 'JustWidthDeltaEntry', 'JustWidthDeltaGroup', 'KernIndexArrayHeader', 'KernKerningPair', 'KernOffsetTable', 'KernOrderedListEntry', 'KernOrderedListHeader', 'KernSimpleArrayHeader', 'KernStateEntry', 'KernStateHeader', 'KernSubtableHeader', 'KernTableHeader', 'KernVersion0Header', 'KernVersion0SubtableHeader', 'KerxAnchorPointAction', 'KerxControlPointAction', 'KerxControlPointEntry', 'KerxControlPointHeader', 'KerxCoordinateAction', 'KerxIndexArrayHeader', 'KerxKerningPair', 'KerxOrderedListEntry', 'KerxOrderedListHeader', 'KerxSimpleArrayHeader', 'KerxStateEntry', 'KerxStateHeader', 'KerxSubtableHeader', 'KerxTableHeader', 'LcarCaretClassEntry', 'LcarCaretTable', 'LtagStringRange', 'LtagTable', 'MDL_CLASS_EXPORT', 'MIDICIDeviceIdentification', 'MIDIChannelMessage', 'MIDIControlTransform', 'MIDIDriverInterface', 'MIDIEventList', 'MIDIEventPacket', 'MIDIIOErrorNotification', 'MIDIMessage_128', 'MIDIMessage_64', 'MIDIMessage_96', 'MIDIMetaEvent', 'MIDINoteMessage', 'MIDINotification', 'MIDIObjectAddRemoveNotification', 'MIDIObjectPropertyChangeNotification', 'MIDIPacket', 'MIDIPacketList', 'MIDIRawData', 'MIDISysexSendRequest', 'MIDIThruConnectionEndpoint', 'MIDIThruConnectionParams', 'MIDITransform', 'MIDIValueMap', 'MPSDeviceOptions', 'MixerDistanceParams', 'MortChain', 'MortContextualSubtable', 'MortFeatureEntry', 'MortInsertionSubtable', 'MortLigatureSubtable', 'MortRearrangementSubtable', 'MortSubtable', 'MortSwashSubtable', 'MortTable', 'MorxChain', 'MorxContextualSubtable', 'MorxInsertionSubtable', 'MorxLigatureSubtable', 'MorxRearrangementSubtable', 'MorxSubtable', 'MorxTable', 'MusicDeviceNoteParams', 'MusicDeviceStdNoteParams', 'MusicEventUserData', 'MusicTrackLoopInfo', 'NoteParamsControlValue', 'OpaqueAudioComponent', 'OpaqueAudioComponentInstance', 'OpaqueAudioConverter', 'OpaqueAudioQueue', 'OpaqueAudioQueueProcessingTap', 'OpaqueAudioQueueTimeline', 'OpaqueExtAudioFile', 'OpaqueJSClass', 'OpaqueJSContext', 'OpaqueJSContextGroup', 'OpaqueJSPropertyNameAccumulator', 'OpaqueJSPropertyNameArray', 'OpaqueJSString', 'OpaqueJSValue', 'OpaqueMusicEventIterator', 'OpaqueMusicPlayer', 'OpaqueMusicSequence', 'OpaqueMusicTrack', 'OpbdSideValues', 'OpbdTable', 'ParameterEvent', 'PropLookupSegment', 'PropLookupSingle', 'PropTable', 'ROTAGlyphEntry', 'ROTAHeader', 'SCNMatrix4', 'SCNVector3', 'SCNVector4', 'SFNTLookupArrayHeader', 'SFNTLookupBinarySearchHeader', 'SFNTLookupSegment', 'SFNTLookupSegmentHeader', 'SFNTLookupSingle', 'SFNTLookupSingleHeader', 'SFNTLookupTable', 'SFNTLookupTrimmedArrayHeader', 'SFNTLookupVectorHeader', 'SMPTETime', 'STClassTable', 'STEntryOne', 'STEntryTwo', 'STEntryZero', 'STHeader', 'STXEntryOne', 'STXEntryTwo', 'STXEntryZero', 'STXHeader', 'ScheduledAudioFileRegion', 'ScheduledAudioSlice', 'SecKeychainAttribute', 'SecKeychainAttributeInfo', 'SecKeychainAttributeList', 'TrakTable', 'TrakTableData', 'TrakTableEntry', 'UIAccessibility', 'VTDecompressionOutputCallbackRecord', 'VTInt32Point', 'VTInt32Size', '_CFHTTPAuthentication', '_GLKMatrix2', '_GLKMatrix3', '_GLKMatrix4', '_GLKQuaternion', '_GLKVector2', 
'_GLKVector3', '_GLKVector4', '_GLKVertexAttributeParameters', '_MTLAxisAlignedBoundingBox', '_MTLPackedFloat3', '_MTLPackedFloat4x3', '_NSRange', '_NSZone', '__CFHTTPMessage', '__CFHost', '__CFNetDiagnostic', '__CFNetService', '__CFNetServiceBrowser', '__CFNetServiceMonitor', '__CFXMLNode', '__CFXMLParser', '__GLsync', '__SecAccess', '__SecCertificate', '__SecIdentity', '__SecKey', '__SecRandom', '__attribute__', 'gss_OID_desc_struct', 'gss_OID_set_desc_struct', 'gss_auth_identity', 'gss_buffer_desc_struct', 'gss_buffer_set_desc_struct', 'gss_channel_bindings_struct', 'gss_cred_id_t_desc_struct', 'gss_ctx_id_t_desc_struct', 'gss_iov_buffer_desc_struct', 'gss_krb5_cfx_keydata', 'gss_krb5_lucid_context_v1', 'gss_krb5_lucid_context_version', 'gss_krb5_lucid_key', 'gss_krb5_rfc1964_keydata', 'gss_name_t_desc_struct', 'opaqueCMBufferQueueTriggerToken', 'sfntCMapEncoding', 'sfntCMapExtendedSubHeader', 'sfntCMapHeader', 'sfntCMapSubHeader', 'sfntDescriptorHeader', 'sfntDirectory', 'sfntDirectoryEntry', 'sfntFeatureHeader', 'sfntFeatureName', 'sfntFontDescriptor', 'sfntFontFeatureSetting', 'sfntFontRunFeature', 'sfntInstance', 'sfntNameHeader', 'sfntNameRecord', 'sfntVariationAxis', 'sfntVariationHeader'}
if __name__ == '__main__': # pragma: no cover
import os
import re
- FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/'
+ FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/'
frameworks = os.listdir(FRAMEWORKS_PATH)
all_interfaces = set()
@@ -36,14 +36,14 @@ if __name__ == '__main__': # pragma: no cover
if not f.endswith('.h'):
continue
headerFilePath = frameworkHeadersDir + f
-
- try:
- with open(headerFilePath, encoding='utf-8') as f:
- content = f.read()
- except UnicodeDecodeError:
- print("Decoding error for file: {0}".format(headerFilePath))
- continue
-
+
+ try:
+ with open(headerFilePath, encoding='utf-8') as f:
+ content = f.read()
+ except UnicodeDecodeError:
+ print("Decoding error for file: {0}".format(headerFilePath))
+ continue
+
res = re.findall(r'(?<=@interface )\w+', content)
for r in res:
all_interfaces.add(r)
@@ -66,10 +66,10 @@ if __name__ == '__main__': # pragma: no cover
print("ALL interfaces: \n")
- print(sorted(list(all_interfaces)))
+ print(sorted(list(all_interfaces)))
print("\nALL protocols: \n")
- print(sorted(list(all_protocols)))
+ print(sorted(list(all_protocols)))
print("\nALL primitives: \n")
- print(sorted(list(all_primitives)))
+ print(sorted(list(all_primitives)))
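Note on how the data above is consumed: the interface, protocol and primitive names regenerated by the __main__ script land in three plain Python sets (COCOA_INTERFACES, COCOA_PROTOCOLS, COCOA_PRIMITIVES), so a lexer only ever needs membership tests against them. The sketch below is illustrative rather than the exact Objective-C lexer code — the merged COCOA_BUILTINS set and the retag_cocoa_names helper are names chosen here — though Pygments' objective.py filters its token stream in essentially this way.

from pygments.lexers._cocoa_builtins import (
    COCOA_INTERFACES, COCOA_PROTOCOLS, COCOA_PRIMITIVES)
from pygments.token import Name

# One flat lookup table; membership in any of the three sets marks a builtin.
COCOA_BUILTINS = COCOA_INTERFACES | COCOA_PROTOCOLS | COCOA_PRIMITIVES

def retag_cocoa_names(tokens):
    """Promote plain Name/Name.Class tokens that are known Cocoa names."""
    for index, token, value in tokens:
        if token in (Name, Name.Class) and value in COCOA_BUILTINS:
            token = Name.Builtin.Pseudo
        yield index, token, value

Keeping the data as sets (rather than tuples or one huge regex alternation) is what makes this cheap: the three tables together hold a few thousand identifiers, and each token costs a single hash lookup.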
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
index e7e395dc6a..f2fdef2774 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
@@ -2,83 +2,83 @@
pygments.lexers._csound_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-REMOVED_OPCODES = set('''
-OSCsendA
-beadsynt
-beosc
-buchla
-getrowlin
-lua_exec
-lua_iaopcall
-lua_iaopcall_off
-lua_ikopcall
-lua_ikopcall_off
-lua_iopcall
-lua_iopcall_off
-lua_opdef
-mp3scal_check
-mp3scal_load
-mp3scal_load2
-mp3scal_play
-mp3scal_play2
-pvsgendy
-socksend_k
-signalflowgraph
-sumTableFilter
-systime
-tabrowlin
-vbap1move
-'''.split())
-
-# Opcodes in Csound 6.16.0 using:
-# python3 -c "
-# import re
-# from subprocess import Popen, PIPE
-# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
-# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
-# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
-# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
+REMOVED_OPCODES = set('''
+OSCsendA
+beadsynt
+beosc
+buchla
+getrowlin
+lua_exec
+lua_iaopcall
+lua_iaopcall_off
+lua_ikopcall
+lua_ikopcall_off
+lua_iopcall
+lua_iopcall_off
+lua_opdef
+mp3scal_check
+mp3scal_load
+mp3scal_load2
+mp3scal_play
+mp3scal_play2
+pvsgendy
+socksend_k
+signalflowgraph
+sumTableFilter
+systime
+tabrowlin
+vbap1move
+'''.split())
+
+# Opcodes in Csound 6.16.0 using:
+# python3 -c "
+# import re
+# from subprocess import Popen, PIPE
+# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
+# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
+# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
+# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
# deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes]
-# # Remove opcodes that csound.py treats as keywords.
-# keyword_opcodes = [
-# 'cggoto', # https://csound.com/docs/manual/cggoto.html
-# 'cigoto', # https://csound.com/docs/manual/cigoto.html
-# 'cingoto', # (undocumented)
-# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html
-# 'cngoto', # https://csound.com/docs/manual/cngoto.html
-# 'cnkgoto', # (undocumented)
-# 'endin', # https://csound.com/docs/manual/endin.html
-# 'endop', # https://csound.com/docs/manual/endop.html
-# 'goto', # https://csound.com/docs/manual/goto.html
-# 'igoto', # https://csound.com/docs/manual/igoto.html
-# 'instr', # https://csound.com/docs/manual/instr.html
-# 'kgoto', # https://csound.com/docs/manual/kgoto.html
-# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html
-# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html
-# 'loop_le', # https://csound.com/docs/manual/loop_le.html
-# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html
-# 'opcode', # https://csound.com/docs/manual/opcode.html
-# 'reinit', # https://csound.com/docs/manual/reinit.html
-# 'return', # https://csound.com/docs/manual/return.html
-# 'rireturn', # https://csound.com/docs/manual/rireturn.html
-# 'rigoto', # https://csound.com/docs/manual/rigoto.html
-# 'tigoto', # https://csound.com/docs/manual/tigoto.html
-# 'timout' # https://csound.com/docs/manual/timout.html
-# ]
-# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
-# newline = '\n'
-# print(f'''OPCODES = set(\'''
-# {newline.join(opcodes)}
+# # Remove opcodes that csound.py treats as keywords.
+# keyword_opcodes = [
+# 'cggoto', # https://csound.com/docs/manual/cggoto.html
+# 'cigoto', # https://csound.com/docs/manual/cigoto.html
+# 'cingoto', # (undocumented)
+# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html
+# 'cngoto', # https://csound.com/docs/manual/cngoto.html
+# 'cnkgoto', # (undocumented)
+# 'endin', # https://csound.com/docs/manual/endin.html
+# 'endop', # https://csound.com/docs/manual/endop.html
+# 'goto', # https://csound.com/docs/manual/goto.html
+# 'igoto', # https://csound.com/docs/manual/igoto.html
+# 'instr', # https://csound.com/docs/manual/instr.html
+# 'kgoto', # https://csound.com/docs/manual/kgoto.html
+# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html
+# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html
+# 'loop_le', # https://csound.com/docs/manual/loop_le.html
+# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html
+# 'opcode', # https://csound.com/docs/manual/opcode.html
+# 'reinit', # https://csound.com/docs/manual/reinit.html
+# 'return', # https://csound.com/docs/manual/return.html
+# 'rireturn', # https://csound.com/docs/manual/rireturn.html
+# 'rigoto', # https://csound.com/docs/manual/rigoto.html
+# 'tigoto', # https://csound.com/docs/manual/tigoto.html
+# 'timout' # https://csound.com/docs/manual/timout.html
+# ]
+# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
+# newline = '\n'
+# print(f'''OPCODES = set(\'''
+# {newline.join(opcodes)}
# \'''.split())
#
# DEPRECATED_OPCODES = set(\'''
-# {newline.join(deprecated_opcodes)}
+# {newline.join(deprecated_opcodes)}
# \'''.split())
-# ''')
+# ''')
# "
OPCODES = set('''
@@ -200,7 +200,7 @@ STKBowed
STKBrass
STKClarinet
STKDrummer
-STKFMVoices
+STKFMVoices
STKFlute
STKHevyMetl
STKMandolin
@@ -227,25 +227,25 @@ adsyn
adsynt
adsynt2
aftouch
-allpole
+allpole
alpass
alwayson
ampdb
ampdbfs
ampmidi
-ampmidicurve
+ampmidicurve
ampmidid
-apoleparams
-arduinoRead
-arduinoReadF
-arduinoStart
-arduinoStop
+apoleparams
+arduinoRead
+arduinoReadF
+arduinoStart
+arduinoStop
areson
aresonk
atone
atonek
atonex
-autocorr
+autocorr
babo
balance
balance2
@@ -261,7 +261,7 @@ binit
biquad
biquada
birnd
-bob
+bob
bpf
bpfcos
bqrez
@@ -287,7 +287,7 @@ centroid
ceps
cepsinv
chanctrl
-changed
+changed
changed2
chani
chano
@@ -299,19 +299,19 @@ chn_k
chnclear
chnexport
chnget
-chngeta
-chngeti
-chngetk
+chngeta
+chngeti
+chngetk
chngetks
-chngets
+chngets
chnmix
chnparams
chnset
-chnseta
-chnseti
-chnsetk
+chnseta
+chnseti
+chnsetk
chnsetks
-chnsets
+chnsets
chuap
clear
clfilt
@@ -320,13 +320,13 @@ clockoff
clockon
cmp
cmplxprod
-cntCreate
-cntCycles
-cntDelete
-cntDelete_i
-cntRead
-cntReset
-cntState
+cntCreate
+cntCycles
+cntDelete
+cntDelete_i
+cntRead
+cntReset
+cntState
comb
combinv
compilecsd
@@ -346,8 +346,8 @@ cosinv
cosseg
cossegb
cossegr
-count
-count_i
+count
+count_i
cps2pch
cpsmidi
cpsmidib
@@ -373,11 +373,11 @@ ctrl14
ctrl21
ctrl7
ctrlinit
-ctrlpreset
-ctrlprint
-ctrlprintpresets
-ctrlsave
-ctrlselect
+ctrlpreset
+ctrlprint
+ctrlprintpresets
+ctrlsave
+ctrlselect
cuserrnd
dam
date
@@ -478,17 +478,17 @@ flashtxt
flooper
flooper2
floor
-fluidAllOut
-fluidCCi
-fluidCCk
-fluidControl
-fluidEngine
-fluidInfo
-fluidLoad
-fluidNote
-fluidOut
-fluidProgramSelect
-fluidSetInterpMethod
+fluidAllOut
+fluidCCi
+fluidCCk
+fluidControl
+fluidEngine
+fluidInfo
+fluidLoad
+fluidNote
+fluidOut
+fluidProgramSelect
+fluidSetInterpMethod
fmanal
fmax
fmb3
@@ -523,7 +523,7 @@ ftaudio
ftchnls
ftconv
ftcps
-ftexists
+ftexists
ftfree
ftgen
ftgenonce
@@ -540,9 +540,9 @@ ftresizei
ftsamplebank
ftsave
ftsavek
-ftset
+ftset
ftslice
-ftslicei
+ftslicei
ftsr
gain
gainslider
@@ -565,7 +565,7 @@ grain
grain2
grain3
granule
-gtf
+gtf
guiro
harmon
harmon2
@@ -673,10 +673,10 @@ la_i_multiply_mc
la_i_multiply_mr
la_i_multiply_vc
la_i_multiply_vr
-la_i_norm1_mc
-la_i_norm1_mr
-la_i_norm1_vc
-la_i_norm1_vr
+la_i_norm1_mc
+la_i_norm1_mr
+la_i_norm1_vc
+la_i_norm1_vr
la_i_norm_euclid_mc
la_i_norm_euclid_mr
la_i_norm_euclid_vc
@@ -771,10 +771,10 @@ la_k_multiply_mc
la_k_multiply_mr
la_k_multiply_vc
la_k_multiply_vr
-la_k_norm1_mc
-la_k_norm1_mr
-la_k_norm1_vc
-la_k_norm1_vr
+la_k_norm1_mc
+la_k_norm1_mr
+la_k_norm1_vc
+la_k_norm1_vr
la_k_norm_euclid_mc
la_k_norm_euclid_mr
la_k_norm_euclid_vc
@@ -806,12 +806,12 @@ la_k_upper_solve_mc
la_k_upper_solve_mr
la_k_vc_set
la_k_vr_set
-lag
-lagud
-lastcycle
+lag
+lagud
+lastcycle
lenarray
lfo
-lfsr
+lfsr
limit
limit1
lincos
@@ -855,8 +855,8 @@ loscilx
lowpass2
lowres
lowresx
-lpcanal
-lpcfilter
+lpcanal
+lpcfilter
lpf18
lpform
lpfreson
@@ -872,7 +872,7 @@ lpreson
lpshold
lpsholdp
lpslot
-lufs
+lufs
mac
maca
madsr
@@ -895,7 +895,7 @@ mdelay
median
mediank
metro
-metro2
+metro2
mfb
midglobal
midiarp
@@ -948,7 +948,7 @@ mp3scal
mp3sr
mpulse
mrtmsg
-ms2st
+ms2st
mtof
mton
multitap
@@ -958,7 +958,7 @@ mvclpf1
mvclpf2
mvclpf3
mvclpf4
-mvmfilter
+mvmfilter
mxadsr
nchnls_hw
nestedap
@@ -976,8 +976,8 @@ nrpn
nsamp
nstance
nstrnum
-nstrstr
-ntof
+nstrstr
+ntof
ntom
ntrpol
nxtpow2
@@ -1002,7 +1002,7 @@ oscils
oscilx
out
out32
-outall
+outall
outc
outch
outh
@@ -1103,9 +1103,9 @@ printk
printk2
printks
printks2
-println
+println
prints
-printsk
+printsk
product
pset
ptablew
@@ -1124,7 +1124,7 @@ pvsanal
pvsarp
pvsbandp
pvsbandr
-pvsbandwidth
+pvsbandwidth
pvsbin
pvsblur
pvsbuffer
@@ -1133,7 +1133,7 @@ pvsbufread2
pvscale
pvscent
pvsceps
-pvscfs
+pvscfs
pvscross
pvsdemix
pvsdiskin
@@ -1153,7 +1153,7 @@ pvsin
pvsinfo
pvsinit
pvslock
-pvslpc
+pvslpc
pvsmaska
pvsmix
pvsmooth
@@ -1255,7 +1255,7 @@ qinf
qnan
r2c
rand
-randc
+randc
randh
randi
random
@@ -1279,7 +1279,7 @@ remove
repluck
reshapearray
reson
-resonbnk
+resonbnk
resonk
resonr
resonx
@@ -1297,7 +1297,7 @@ rifft
rms
rnd
rnd31
-rndseed
+rndseed
round
rspline
rtclock
@@ -1310,17 +1310,17 @@ sc_lagud
sc_phasor
sc_trig
scale
-scale2
+scale2
scalearray
scanhammer
scans
scantable
scanu
-scanu2
+scanu2
schedkwhen
schedkwhennamed
schedule
-schedulek
+schedulek
schedwhen
scoreline
scoreline_i
@@ -1366,7 +1366,7 @@ sin
sinh
sininv
sinsyn
-skf
+skf
sleighbells
slicearray
slicearray_i
@@ -1402,16 +1402,16 @@ spat3d
spat3di
spat3dt
spdist
-spf
+spf
splitrig
sprintf
sprintfk
spsend
sqrt
squinewave
-st2ms
+st2ms
statevar
-sterrain
+sterrain
stix
strcat
strcatk
@@ -1427,7 +1427,7 @@ strfromurl
strget
strindex
strindexk
-string2array
+string2array
strlen
strlenk
strlower
@@ -1435,7 +1435,7 @@ strlowerk
strrindex
strrindexk
strset
-strstrip
+strstrip
strsub
strsubk
strtod
@@ -1450,7 +1450,7 @@ subinstrinit
sum
sumarray
svfilter
-svn
+svn
syncgrain
syncloop
syncphasor
@@ -1522,11 +1522,11 @@ transegr
trcross
trfilter
trhighest
-trigExpseg
-trigLinseg
+trigExpseg
+trigLinseg
trigger
-trighold
-trigphasor
+trighold
+trigphasor
trigseq
trim
trim_i
@@ -1538,8 +1538,8 @@ trshift
trsplit
turnoff
turnoff2
-turnoff2_i
-turnoff3
+turnoff2_i
+turnoff3
turnon
tvconv
unirand
@@ -1563,7 +1563,7 @@ vbapmove
vbapz
vbapzmove
vcella
-vclpf
+vclpf
vco
vco2
vco2ft
@@ -1612,7 +1612,7 @@ vpow
vpow_i
vpowv
vpowv_i
-vps
+vps
vpvoc
vrandh
vrandi
@@ -1652,7 +1652,7 @@ window
wrap
writescratch
wterrain
-wterrain2
+wterrain2
xadsr
xin
xout
@@ -1698,10 +1698,10 @@ maxtab
mintab
pop
pop_f
-ptable
-ptable3
-ptablei
-ptableiw
+ptable
+ptable3
+ptablei
+ptableiw
push
push_f
scalet
@@ -1720,7 +1720,7 @@ spectrum
stack
sumtab
tabgen
-tableiw
+tableiw
tabmap
tabmap_i
tabslice
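For context on the _csound_builtins.py hunks above: the Csound lexer resolves bare identifiers through membership tests against the OPCODES, DEPRECATED_OPCODES and REMOVED_OPCODES sets defined in this file. The helper below is only a sketch of that lookup — classify_opcode is a name invented here, and the real lexer performs the check inside a token callback — but the three sets are the ones maintained above.

from pygments.lexers._csound_builtins import (
    OPCODES, DEPRECATED_OPCODES, REMOVED_OPCODES)
from pygments.token import Name

def classify_opcode(name):
    """Map a bare Csound identifier to a token type via the builtin sets."""
    if name in OPCODES or name in DEPRECATED_OPCODES:
        return Name.Builtin
    if name in REMOVED_OPCODES:
        # Known historically but no longer shipped; still flagged so that
        # old orchestras keep rendering sensibly.
        return Name.Builtin
    return Name  # fall back to a plain name

print(classify_opcode('metro2'))   # Token.Name.Builtin (listed in OPCODES above)
print(classify_opcode('systime'))  # Token.Name.Builtin via REMOVED_OPCODES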
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py
index ae786d89c0..e66e40b527 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py
@@ -8,404 +8,404 @@
:license: BSD, see LICENSE for details.
"""
-# operators
-# see https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
-# Julia v1.6.0-rc1
-OPERATORS_LIST = [
- # other
- '->',
- # prec-assignment
- ':=', '$=',
- # prec-conditional, prec-lazy-or, prec-lazy-and
- '?', '||', '&&',
- # prec-colon
- ':',
- # prec-plus
- '$',
- # prec-decl
- '::',
-]
-DOTTED_OPERATORS_LIST = [
- # prec-assignment
- r'=', r'+=', r'-=', r'*=', r'/=', r'//=', r'\=', r'^=', r'÷=', r'%=', r'<<=',
- r'>>=', r'>>>=', r'|=', r'&=', r'⊻=', r'≔', r'⩴', r"≕'", r'~',
- # prec-pair
- '=>',
- # prec-arrow
- r'→', r'↔', r'↚', r'↛', r'↞', r'↠', r'↢', r'↣', r'↦', r'↤', r'↮', r'⇎', r'⇍', r'⇏',
- r'⇐', r'⇒', r'⇔', r'⇴', r'⇶', r'⇷', r'⇸', r'⇹', r'⇺', r'⇻', r'⇼', r'⇽', r'⇾', r'⇿',
- r'⟵', r'⟶', r'⟷', r'⟹', r'⟺', r'⟻', r'⟼', r'⟽', r'⟾', r'⟿', r'⤀', r'⤁', r'⤂', r'⤃',
- r'⤄', r'⤅', r'⤆', r'⤇', r'⤌', r'⤍', r'⤎', r'⤏', r'⤐', r'⤑', r'⤔', r'⤕', r'⤖', r'⤗',
- r'⤘', r'⤝', r'⤞', r'⤟', r'⤠', r'⥄', r'⥅', r'⥆', r'⥇', r'⥈', r'⥊', r'⥋', r'⥎', r'⥐',
- r'⥒', r'⥓', r'⥖', r'⥗', r'⥚', r'⥛', r'⥞', r'⥟', r'⥢', r'⥤', r'⥦', r'⥧', r'⥨', r'⥩',
- r'⥪', r'⥫', r'⥬', r'⥭', r'⥰', r'⧴', r'⬱', r'⬰', r'⬲', r'⬳', r'⬴', r'⬵', r'⬶', r'⬷',
- r'⬸', r'⬹', r'⬺', r'⬻', r'⬼', r'⬽', r'⬾', r'⬿', r'⭀', r'⭁', r'⭂', r'⭃', r'⭄', r'⭇',
- r'⭈', r'⭉', r'⭊', r'⭋', r'⭌', r'←', r'→', r'⇜', r'⇝', r'↜', r'↝', r'↩', r'↪', r'↫',
- r'↬', r'↼', r'↽', r'⇀', r'⇁', r'⇄', r'⇆', r'⇇', r'⇉', r'⇋', r'⇌', r'⇚', r'⇛', r'⇠',
- r'⇢', r'↷', r'↶', r'↺', r'↻', r'-->', r'<--', r'<-->',
- # prec-comparison
- r'>', r'<', r'>=', r'≥', r'<=', r'≤', r'==', r'===', r'≡', r'!=', r'≠', r'!==',
- r'≢', r'∈', r'∉', r'∋', r'∌', r'⊆', r'⊈', r'⊂', r'⊄', r'⊊', r'∝', r'∊', r'∍', r'∥',
- r'∦', r'∷', r'∺', r'∻', r'∽', r'∾', r'≁', r'≃', r'≂', r'≄', r'≅', r'≆', r'≇', r'≈',
- r'≉', r'≊', r'≋', r'≌', r'≍', r'≎', r'≐', r'≑', r'≒', r'≓', r'≖', r'≗', r'≘', r'≙',
- r'≚', r'≛', r'≜', r'≝', r'≞', r'≟', r'≣', r'≦', r'≧', r'≨', r'≩', r'≪', r'≫', r'≬',
- r'≭', r'≮', r'≯', r'≰', r'≱', r'≲', r'≳', r'≴', r'≵', r'≶', r'≷', r'≸', r'≹', r'≺',
- r'≻', r'≼', r'≽', r'≾', r'≿', r'⊀', r'⊁', r'⊃', r'⊅', r'⊇', r'⊉', r'⊋', r'⊏', r'⊐',
- r'⊑', r'⊒', r'⊜', r'⊩', r'⊬', r'⊮', r'⊰', r'⊱', r'⊲', r'⊳', r'⊴', r'⊵', r'⊶', r'⊷',
- r'⋍', r'⋐', r'⋑', r'⋕', r'⋖', r'⋗', r'⋘', r'⋙', r'⋚', r'⋛', r'⋜', r'⋝', r'⋞', r'⋟',
- r'⋠', r'⋡', r'⋢', r'⋣', r'⋤', r'⋥', r'⋦', r'⋧', r'⋨', r'⋩', r'⋪', r'⋫', r'⋬', r'⋭',
- r'⋲', r'⋳', r'⋴', r'⋵', r'⋶', r'⋷', r'⋸', r'⋹', r'⋺', r'⋻', r'⋼', r'⋽', r'⋾', r'⋿',
- r'⟈', r'⟉', r'⟒', r'⦷', r'⧀', r'⧁', r'⧡', r'⧣', r'⧤', r'⧥', r'⩦', r'⩧', r'⩪', r'⩫',
- r'⩬', r'⩭', r'⩮', r'⩯', r'⩰', r'⩱', r'⩲', r'⩳', r'⩵', r'⩶', r'⩷', r'⩸', r'⩹', r'⩺',
- r'⩻', r'⩼', r'⩽', r'⩾', r'⩿', r'⪀', r'⪁', r'⪂', r'⪃', r'⪄', r'⪅', r'⪆', r'⪇', r'⪈',
- r'⪉', r'⪊', r'⪋', r'⪌', r'⪍', r'⪎', r'⪏', r'⪐', r'⪑', r'⪒', r'⪓', r'⪔', r'⪕', r'⪖',
- r'⪗', r'⪘', r'⪙', r'⪚', r'⪛', r'⪜', r'⪝', r'⪞', r'⪟', r'⪠', r'⪡', r'⪢', r'⪣', r'⪤',
- r'⪥', r'⪦', r'⪧', r'⪨', r'⪩', r'⪪', r'⪫', r'⪬', r'⪭', r'⪮', r'⪯', r'⪰', r'⪱', r'⪲',
- r'⪳', r'⪴', r'⪵', r'⪶', r'⪷', r'⪸', r'⪹', r'⪺', r'⪻', r'⪼', r'⪽', r'⪾', r'⪿', r'⫀',
- r'⫁', r'⫂', r'⫃', r'⫄', r'⫅', r'⫆', r'⫇', r'⫈', r'⫉', r'⫊', r'⫋', r'⫌', r'⫍', r'⫎',
- r'⫏', r'⫐', r'⫑', r'⫒', r'⫓', r'⫔', r'⫕', r'⫖', r'⫗', r'⫘', r'⫙', r'⫷', r'⫸', r'⫹',
- r'⫺', r'⊢', r'⊣', r'⟂', r'<:', r'>:',
- # prec-pipe
- '<|', '|>',
- # prec-colon
- r'…', r'⁝', r'⋮', r'⋱', r'⋰', r'⋯',
- # prec-plus
- r'+', r'-', r'¦', r'|', r'⊕', r'⊖', r'⊞', r'⊟', r'++', r'∪', r'∨', r'⊔', r'±', r'∓',
- r'∔', r'∸', r'≏', r'⊎', r'⊻', r'⊽', r'⋎', r'⋓', r'⧺', r'⧻', r'⨈', r'⨢', r'⨣', r'⨤',
- r'⨥', r'⨦', r'⨧', r'⨨', r'⨩', r'⨪', r'⨫', r'⨬', r'⨭', r'⨮', r'⨹', r'⨺', r'⩁', r'⩂',
- r'⩅', r'⩊', r'⩌', r'⩏', r'⩐', r'⩒', r'⩔', r'⩖', r'⩗', r'⩛', r'⩝', r'⩡', r'⩢', r'⩣',
- # prec-times
- r'*', r'/', r'⌿', r'÷', r'%', r'&', r'⋅', r'∘', r'×', '\\', r'∩', r'∧', r'⊗', r'⊘',
- r'⊙', r'⊚', r'⊛', r'⊠', r'⊡', r'⊓', r'∗', r'∙', r'∤', r'⅋', r'≀', r'⊼', r'⋄', r'⋆',
- r'⋇', r'⋉', r'⋊', r'⋋', r'⋌', r'⋏', r'⋒', r'⟑', r'⦸', r'⦼', r'⦾', r'⦿', r'⧶', r'⧷',
- r'⨇', r'⨰', r'⨱', r'⨲', r'⨳', r'⨴', r'⨵', r'⨶', r'⨷', r'⨸', r'⨻', r'⨼', r'⨽', r'⩀',
- r'⩃', r'⩄', r'⩋', r'⩍', r'⩎', r'⩑', r'⩓', r'⩕', r'⩘', r'⩚', r'⩜', r'⩞', r'⩟', r'⩠',
- r'⫛', r'⊍', r'▷', r'⨝', r'⟕', r'⟖', r'⟗', r'⨟',
- # prec-rational, prec-bitshift
- '//', '>>', '<<', '>>>',
- # prec-power
- r'^', r'↑', r'↓', r'⇵', r'⟰', r'⟱', r'⤈', r'⤉', r'⤊', r'⤋', r'⤒', r'⤓', r'⥉', r'⥌',
- r'⥍', r'⥏', r'⥑', r'⥔', r'⥕', r'⥘', r'⥙', r'⥜', r'⥝', r'⥠', r'⥡', r'⥣', r'⥥', r'⥮',
- r'⥯', r'↑', r'↓',
- # unary-ops, excluding unary-and-binary-ops
- '!', r'¬', r'√', r'∛', r'∜'
-]
-
-# Generated with the following in Julia v1.6.0-rc1
-'''
-#!/usr/bin/env julia
-
-import REPL.REPLCompletions
-res = String["in", "isa", "where"]
-for kw in collect(x.keyword for x in REPLCompletions.complete_keyword(""))
- if !(contains(kw, " ") || kw == "struct")
- push!(res, kw)
- end
-end
-sort!(unique!(setdiff!(res, ["true", "false"])))
-foreach(x -> println("\'", x, "\',"), res)
-'''
-KEYWORD_LIST = (
- 'baremodule',
- 'begin',
- 'break',
- 'catch',
- 'ccall',
- 'const',
- 'continue',
- 'do',
- 'else',
- 'elseif',
- 'end',
- 'export',
- 'finally',
- 'for',
- 'function',
- 'global',
- 'if',
- 'import',
- 'in',
- 'isa',
- 'let',
- 'local',
- 'macro',
- 'module',
- 'quote',
- 'return',
- 'try',
- 'using',
- 'where',
- 'while',
-)
-
-# Generated with the following in Julia v1.6.0-rc1
-'''
-#!/usr/bin/env julia
-
-import REPL.REPLCompletions
-res = String[]
-for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
- REPLCompletions.completions("", 0)[1])
- try
- v = eval(Symbol(compl.mod))
- if (v isa Type || v isa TypeVar) && (compl.mod != "=>")
- push!(res, compl.mod)
- end
- catch e
- end
-end
-sort!(unique!(res))
-foreach(x -> println("\'", x, "\',"), res)
-'''
-BUILTIN_LIST = (
- 'AbstractArray',
- 'AbstractChannel',
- 'AbstractChar',
- 'AbstractDict',
- 'AbstractDisplay',
- 'AbstractFloat',
- 'AbstractIrrational',
- 'AbstractMatch',
- 'AbstractMatrix',
- 'AbstractPattern',
- 'AbstractRange',
- 'AbstractSet',
- 'AbstractString',
- 'AbstractUnitRange',
- 'AbstractVecOrMat',
- 'AbstractVector',
- 'Any',
- 'ArgumentError',
- 'Array',
- 'AssertionError',
- 'BigFloat',
- 'BigInt',
- 'BitArray',
- 'BitMatrix',
- 'BitSet',
- 'BitVector',
- 'Bool',
- 'BoundsError',
- 'CapturedException',
- 'CartesianIndex',
- 'CartesianIndices',
- 'Cchar',
- 'Cdouble',
- 'Cfloat',
- 'Channel',
- 'Char',
- 'Cint',
- 'Cintmax_t',
- 'Clong',
- 'Clonglong',
- 'Cmd',
- 'Colon',
- 'Complex',
- 'ComplexF16',
- 'ComplexF32',
- 'ComplexF64',
- 'ComposedFunction',
- 'CompositeException',
- 'Condition',
- 'Cptrdiff_t',
- 'Cshort',
- 'Csize_t',
- 'Cssize_t',
- 'Cstring',
- 'Cuchar',
- 'Cuint',
- 'Cuintmax_t',
- 'Culong',
- 'Culonglong',
- 'Cushort',
- 'Cvoid',
- 'Cwchar_t',
- 'Cwstring',
- 'DataType',
- 'DenseArray',
- 'DenseMatrix',
- 'DenseVecOrMat',
- 'DenseVector',
- 'Dict',
- 'DimensionMismatch',
- 'Dims',
- 'DivideError',
- 'DomainError',
- 'EOFError',
- 'Enum',
- 'ErrorException',
- 'Exception',
- 'ExponentialBackOff',
- 'Expr',
- 'Float16',
- 'Float32',
- 'Float64',
- 'Function',
- 'GlobalRef',
- 'HTML',
- 'IO',
- 'IOBuffer',
- 'IOContext',
- 'IOStream',
- 'IdDict',
- 'IndexCartesian',
- 'IndexLinear',
- 'IndexStyle',
- 'InexactError',
- 'InitError',
- 'Int',
- 'Int128',
- 'Int16',
- 'Int32',
- 'Int64',
- 'Int8',
- 'Integer',
- 'InterruptException',
- 'InvalidStateException',
- 'Irrational',
- 'KeyError',
- 'LinRange',
- 'LineNumberNode',
- 'LinearIndices',
- 'LoadError',
- 'MIME',
- 'Matrix',
- 'Method',
- 'MethodError',
- 'Missing',
- 'MissingException',
- 'Module',
- 'NTuple',
- 'NamedTuple',
- 'Nothing',
- 'Number',
- 'OrdinalRange',
- 'OutOfMemoryError',
- 'OverflowError',
- 'Pair',
- 'PartialQuickSort',
- 'PermutedDimsArray',
- 'Pipe',
- 'ProcessFailedException',
- 'Ptr',
- 'QuoteNode',
- 'Rational',
- 'RawFD',
- 'ReadOnlyMemoryError',
- 'Real',
- 'ReentrantLock',
- 'Ref',
- 'Regex',
- 'RegexMatch',
- 'RoundingMode',
- 'SegmentationFault',
- 'Set',
- 'Signed',
- 'Some',
- 'StackOverflowError',
- 'StepRange',
- 'StepRangeLen',
- 'StridedArray',
- 'StridedMatrix',
- 'StridedVecOrMat',
- 'StridedVector',
- 'String',
- 'StringIndexError',
- 'SubArray',
- 'SubString',
- 'SubstitutionString',
- 'Symbol',
- 'SystemError',
- 'Task',
- 'TaskFailedException',
- 'Text',
- 'TextDisplay',
- 'Timer',
- 'Tuple',
- 'Type',
- 'TypeError',
- 'TypeVar',
- 'UInt',
- 'UInt128',
- 'UInt16',
- 'UInt32',
- 'UInt64',
- 'UInt8',
- 'UndefInitializer',
- 'UndefKeywordError',
- 'UndefRefError',
- 'UndefVarError',
- 'Union',
- 'UnionAll',
- 'UnitRange',
- 'Unsigned',
- 'Val',
- 'Vararg',
- 'VecElement',
- 'VecOrMat',
- 'Vector',
- 'VersionNumber',
- 'WeakKeyDict',
- 'WeakRef',
-)
-
-# Generated with the following in Julia v1.6.0-rc1
-'''
-#!/usr/bin/env julia
-
-import REPL.REPLCompletions
-res = String["true", "false"]
-for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
- REPLCompletions.completions("", 0)[1])
- try
- v = eval(Symbol(compl.mod))
- if !(v isa Function || v isa Type || v isa TypeVar || v isa Module || v isa Colon)
- push!(res, compl.mod)
- end
- catch e
- end
-end
-sort!(unique!(res))
-foreach(x -> println("\'", x, "\',"), res)
-'''
-LITERAL_LIST = (
- 'ARGS',
- 'C_NULL',
- 'DEPOT_PATH',
- 'ENDIAN_BOM',
- 'ENV',
- 'Inf',
- 'Inf16',
- 'Inf32',
- 'Inf64',
- 'InsertionSort',
- 'LOAD_PATH',
- 'MergeSort',
- 'NaN',
- 'NaN16',
- 'NaN32',
- 'NaN64',
- 'PROGRAM_FILE',
- 'QuickSort',
- 'RoundDown',
- 'RoundFromZero',
- 'RoundNearest',
- 'RoundNearestTiesAway',
- 'RoundNearestTiesUp',
- 'RoundToZero',
- 'RoundUp',
- 'VERSION',
- 'devnull',
- 'false',
- 'im',
- 'missing',
- 'nothing',
- 'pi',
- 'stderr',
- 'stdin',
- 'stdout',
- 'true',
- 'undef',
- 'π',
- 'ℯ',
-)
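The Julia tables removed here and re-added below are meant to be folded into the lexer's regular expressions rather than queried directly: the entries get joined into longest-match-first alternations, with the dotted operators also allowed in their broadcast ('.'-prefixed) form. The sketch below shows one plausible way to build such patterns; operator_re, keyword_re and literal_re are names chosen here, and the real JuliaLexer's rules are more elaborate.

import re

from pygments.lexers._julia_builtins import (
    OPERATORS_LIST, DOTTED_OPERATORS_LIST, KEYWORD_LIST, LITERAL_LIST)

# Sort longest-first so e.g. '>>>' wins over '>>' and '>'.
_dotted = sorted(DOTTED_OPERATORS_LIST, key=len, reverse=True)
_plain = sorted(OPERATORS_LIST, key=len, reverse=True)

# Dotted operators may carry a leading '.' (broadcast form); plain ones may not.
operator_re = re.compile(
    r'\.?(?:' + '|'.join(map(re.escape, _dotted)) + r')'
    r'|(?:' + '|'.join(map(re.escape, _plain)) + r')')

keyword_re = re.compile(r'\b(?:' + '|'.join(map(re.escape, KEYWORD_LIST)) + r')\b')
literal_re = re.compile(r'\b(?:' + '|'.join(map(re.escape, LITERAL_LIST)) + r')\b')

print(bool(operator_re.fullmatch('.÷=')))      # True: broadcast compound assignment
print(bool(keyword_re.fullmatch('function')))  # True
print(bool(literal_re.fullmatch('π')))         # True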
+# operators
+# see https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
+# Julia v1.6.0-rc1
+OPERATORS_LIST = [
+ # other
+ '->',
+ # prec-assignment
+ ':=', '$=',
+ # prec-conditional, prec-lazy-or, prec-lazy-and
+ '?', '||', '&&',
+ # prec-colon
+ ':',
+ # prec-plus
+ '$',
+ # prec-decl
+ '::',
+]
+DOTTED_OPERATORS_LIST = [
+ # prec-assignment
+ r'=', r'+=', r'-=', r'*=', r'/=', r'//=', r'\=', r'^=', r'÷=', r'%=', r'<<=',
+ r'>>=', r'>>>=', r'|=', r'&=', r'⊻=', r'≔', r'⩴', r"≕'", r'~',
+ # prec-pair
+ '=>',
+ # prec-arrow
+ r'→', r'↔', r'↚', r'↛', r'↞', r'↠', r'↢', r'↣', r'↦', r'↤', r'↮', r'⇎', r'⇍', r'⇏',
+ r'⇐', r'⇒', r'⇔', r'⇴', r'⇶', r'⇷', r'⇸', r'⇹', r'⇺', r'⇻', r'⇼', r'⇽', r'⇾', r'⇿',
+ r'⟵', r'⟶', r'⟷', r'⟹', r'⟺', r'⟻', r'⟼', r'⟽', r'⟾', r'⟿', r'⤀', r'⤁', r'⤂', r'⤃',
+ r'⤄', r'⤅', r'⤆', r'⤇', r'⤌', r'⤍', r'⤎', r'⤏', r'⤐', r'⤑', r'⤔', r'⤕', r'⤖', r'⤗',
+ r'⤘', r'⤝', r'⤞', r'⤟', r'⤠', r'⥄', r'⥅', r'⥆', r'⥇', r'⥈', r'⥊', r'⥋', r'⥎', r'⥐',
+ r'⥒', r'⥓', r'⥖', r'⥗', r'⥚', r'⥛', r'⥞', r'⥟', r'⥢', r'⥤', r'⥦', r'⥧', r'⥨', r'⥩',
+ r'⥪', r'⥫', r'⥬', r'⥭', r'⥰', r'⧴', r'⬱', r'⬰', r'⬲', r'⬳', r'⬴', r'⬵', r'⬶', r'⬷',
+ r'⬸', r'⬹', r'⬺', r'⬻', r'⬼', r'⬽', r'⬾', r'⬿', r'⭀', r'⭁', r'⭂', r'⭃', r'⭄', r'⭇',
+ r'⭈', r'⭉', r'⭊', r'⭋', r'⭌', r'←', r'→', r'⇜', r'⇝', r'↜', r'↝', r'↩', r'↪', r'↫',
+ r'↬', r'↼', r'↽', r'⇀', r'⇁', r'⇄', r'⇆', r'⇇', r'⇉', r'⇋', r'⇌', r'⇚', r'⇛', r'⇠',
+ r'⇢', r'↷', r'↶', r'↺', r'↻', r'-->', r'<--', r'<-->',
+ # prec-comparison
+ r'>', r'<', r'>=', r'≥', r'<=', r'≤', r'==', r'===', r'≡', r'!=', r'≠', r'!==',
+ r'≢', r'∈', r'∉', r'∋', r'∌', r'⊆', r'⊈', r'⊂', r'⊄', r'⊊', r'∝', r'∊', r'∍', r'∥',
+ r'∦', r'∷', r'∺', r'∻', r'∽', r'∾', r'≁', r'≃', r'≂', r'≄', r'≅', r'≆', r'≇', r'≈',
+ r'≉', r'≊', r'≋', r'≌', r'≍', r'≎', r'≐', r'≑', r'≒', r'≓', r'≖', r'≗', r'≘', r'≙',
+ r'≚', r'≛', r'≜', r'≝', r'≞', r'≟', r'≣', r'≦', r'≧', r'≨', r'≩', r'≪', r'≫', r'≬',
+ r'≭', r'≮', r'≯', r'≰', r'≱', r'≲', r'≳', r'≴', r'≵', r'≶', r'≷', r'≸', r'≹', r'≺',
+ r'≻', r'≼', r'≽', r'≾', r'≿', r'⊀', r'⊁', r'⊃', r'⊅', r'⊇', r'⊉', r'⊋', r'⊏', r'⊐',
+ r'⊑', r'⊒', r'⊜', r'⊩', r'⊬', r'⊮', r'⊰', r'⊱', r'⊲', r'⊳', r'⊴', r'⊵', r'⊶', r'⊷',
+ r'⋍', r'⋐', r'⋑', r'⋕', r'⋖', r'⋗', r'⋘', r'⋙', r'⋚', r'⋛', r'⋜', r'⋝', r'⋞', r'⋟',
+ r'⋠', r'⋡', r'⋢', r'⋣', r'⋤', r'⋥', r'⋦', r'⋧', r'⋨', r'⋩', r'⋪', r'⋫', r'⋬', r'⋭',
+ r'⋲', r'⋳', r'⋴', r'⋵', r'⋶', r'⋷', r'⋸', r'⋹', r'⋺', r'⋻', r'⋼', r'⋽', r'⋾', r'⋿',
+ r'⟈', r'⟉', r'⟒', r'⦷', r'⧀', r'⧁', r'⧡', r'⧣', r'⧤', r'⧥', r'⩦', r'⩧', r'⩪', r'⩫',
+ r'⩬', r'⩭', r'⩮', r'⩯', r'⩰', r'⩱', r'⩲', r'⩳', r'⩵', r'⩶', r'⩷', r'⩸', r'⩹', r'⩺',
+ r'⩻', r'⩼', r'⩽', r'⩾', r'⩿', r'⪀', r'⪁', r'⪂', r'⪃', r'⪄', r'⪅', r'⪆', r'⪇', r'⪈',
+ r'⪉', r'⪊', r'⪋', r'⪌', r'⪍', r'⪎', r'⪏', r'⪐', r'⪑', r'⪒', r'⪓', r'⪔', r'⪕', r'⪖',
+ r'⪗', r'⪘', r'⪙', r'⪚', r'⪛', r'⪜', r'⪝', r'⪞', r'⪟', r'⪠', r'⪡', r'⪢', r'⪣', r'⪤',
+ r'⪥', r'⪦', r'⪧', r'⪨', r'⪩', r'⪪', r'⪫', r'⪬', r'⪭', r'⪮', r'⪯', r'⪰', r'⪱', r'⪲',
+ r'⪳', r'⪴', r'⪵', r'⪶', r'⪷', r'⪸', r'⪹', r'⪺', r'⪻', r'⪼', r'⪽', r'⪾', r'⪿', r'⫀',
+ r'⫁', r'⫂', r'⫃', r'⫄', r'⫅', r'⫆', r'⫇', r'⫈', r'⫉', r'⫊', r'⫋', r'⫌', r'⫍', r'⫎',
+ r'⫏', r'⫐', r'⫑', r'⫒', r'⫓', r'⫔', r'⫕', r'⫖', r'⫗', r'⫘', r'⫙', r'⫷', r'⫸', r'⫹',
+ r'⫺', r'⊢', r'⊣', r'⟂', r'<:', r'>:',
+ # prec-pipe
+ '<|', '|>',
+ # prec-colon
+ r'…', r'⁝', r'⋮', r'⋱', r'⋰', r'⋯',
+ # prec-plus
+ r'+', r'-', r'¦', r'|', r'⊕', r'⊖', r'⊞', r'⊟', r'++', r'∪', r'∨', r'⊔', r'±', r'∓',
+ r'∔', r'∸', r'≏', r'⊎', r'⊻', r'⊽', r'⋎', r'⋓', r'⧺', r'⧻', r'⨈', r'⨢', r'⨣', r'⨤',
+ r'⨥', r'⨦', r'⨧', r'⨨', r'⨩', r'⨪', r'⨫', r'⨬', r'⨭', r'⨮', r'⨹', r'⨺', r'⩁', r'⩂',
+ r'⩅', r'⩊', r'⩌', r'⩏', r'⩐', r'⩒', r'⩔', r'⩖', r'⩗', r'⩛', r'⩝', r'⩡', r'⩢', r'⩣',
+ # prec-times
+ r'*', r'/', r'⌿', r'÷', r'%', r'&', r'⋅', r'∘', r'×', '\\', r'∩', r'∧', r'⊗', r'⊘',
+ r'⊙', r'⊚', r'⊛', r'⊠', r'⊡', r'⊓', r'∗', r'∙', r'∤', r'⅋', r'≀', r'⊼', r'⋄', r'⋆',
+ r'⋇', r'⋉', r'⋊', r'⋋', r'⋌', r'⋏', r'⋒', r'⟑', r'⦸', r'⦼', r'⦾', r'⦿', r'⧶', r'⧷',
+ r'⨇', r'⨰', r'⨱', r'⨲', r'⨳', r'⨴', r'⨵', r'⨶', r'⨷', r'⨸', r'⨻', r'⨼', r'⨽', r'⩀',
+ r'⩃', r'⩄', r'⩋', r'⩍', r'⩎', r'⩑', r'⩓', r'⩕', r'⩘', r'⩚', r'⩜', r'⩞', r'⩟', r'⩠',
+ r'⫛', r'⊍', r'▷', r'⨝', r'⟕', r'⟖', r'⟗', r'⨟',
+ # prec-rational, prec-bitshift
+ '//', '>>', '<<', '>>>',
+ # prec-power
+ r'^', r'↑', r'↓', r'⇵', r'⟰', r'⟱', r'⤈', r'⤉', r'⤊', r'⤋', r'⤒', r'⤓', r'⥉', r'⥌',
+ r'⥍', r'⥏', r'⥑', r'⥔', r'⥕', r'⥘', r'⥙', r'⥜', r'⥝', r'⥠', r'⥡', r'⥣', r'⥥', r'⥮',
+ r'⥯', r'↑', r'↓',
+ # unary-ops, excluding unary-and-binary-ops
+ '!', r'¬', r'√', r'∛', r'∜'
+]
+
+# Generated with the following in Julia v1.6.0-rc1
+'''
+#!/usr/bin/env julia
+
+import REPL.REPLCompletions
+res = String["in", "isa", "where"]
+for kw in collect(x.keyword for x in REPLCompletions.complete_keyword(""))
+ if !(contains(kw, " ") || kw == "struct")
+ push!(res, kw)
+ end
+end
+sort!(unique!(setdiff!(res, ["true", "false"])))
+foreach(x -> println("\'", x, "\',"), res)
+'''
+KEYWORD_LIST = (
+ 'baremodule',
+ 'begin',
+ 'break',
+ 'catch',
+ 'ccall',
+ 'const',
+ 'continue',
+ 'do',
+ 'else',
+ 'elseif',
+ 'end',
+ 'export',
+ 'finally',
+ 'for',
+ 'function',
+ 'global',
+ 'if',
+ 'import',
+ 'in',
+ 'isa',
+ 'let',
+ 'local',
+ 'macro',
+ 'module',
+ 'quote',
+ 'return',
+ 'try',
+ 'using',
+ 'where',
+ 'while',
+)
+
+# Generated with the following in Julia v1.6.0-rc1
+'''
+#!/usr/bin/env julia
+
+import REPL.REPLCompletions
+res = String[]
+for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
+ REPLCompletions.completions("", 0)[1])
+ try
+ v = eval(Symbol(compl.mod))
+ if (v isa Type || v isa TypeVar) && (compl.mod != "=>")
+ push!(res, compl.mod)
+ end
+ catch e
+ end
+end
+sort!(unique!(res))
+foreach(x -> println("\'", x, "\',"), res)
+'''
+BUILTIN_LIST = (
+ 'AbstractArray',
+ 'AbstractChannel',
+ 'AbstractChar',
+ 'AbstractDict',
+ 'AbstractDisplay',
+ 'AbstractFloat',
+ 'AbstractIrrational',
+ 'AbstractMatch',
+ 'AbstractMatrix',
+ 'AbstractPattern',
+ 'AbstractRange',
+ 'AbstractSet',
+ 'AbstractString',
+ 'AbstractUnitRange',
+ 'AbstractVecOrMat',
+ 'AbstractVector',
+ 'Any',
+ 'ArgumentError',
+ 'Array',
+ 'AssertionError',
+ 'BigFloat',
+ 'BigInt',
+ 'BitArray',
+ 'BitMatrix',
+ 'BitSet',
+ 'BitVector',
+ 'Bool',
+ 'BoundsError',
+ 'CapturedException',
+ 'CartesianIndex',
+ 'CartesianIndices',
+ 'Cchar',
+ 'Cdouble',
+ 'Cfloat',
+ 'Channel',
+ 'Char',
+ 'Cint',
+ 'Cintmax_t',
+ 'Clong',
+ 'Clonglong',
+ 'Cmd',
+ 'Colon',
+ 'Complex',
+ 'ComplexF16',
+ 'ComplexF32',
+ 'ComplexF64',
+ 'ComposedFunction',
+ 'CompositeException',
+ 'Condition',
+ 'Cptrdiff_t',
+ 'Cshort',
+ 'Csize_t',
+ 'Cssize_t',
+ 'Cstring',
+ 'Cuchar',
+ 'Cuint',
+ 'Cuintmax_t',
+ 'Culong',
+ 'Culonglong',
+ 'Cushort',
+ 'Cvoid',
+ 'Cwchar_t',
+ 'Cwstring',
+ 'DataType',
+ 'DenseArray',
+ 'DenseMatrix',
+ 'DenseVecOrMat',
+ 'DenseVector',
+ 'Dict',
+ 'DimensionMismatch',
+ 'Dims',
+ 'DivideError',
+ 'DomainError',
+ 'EOFError',
+ 'Enum',
+ 'ErrorException',
+ 'Exception',
+ 'ExponentialBackOff',
+ 'Expr',
+ 'Float16',
+ 'Float32',
+ 'Float64',
+ 'Function',
+ 'GlobalRef',
+ 'HTML',
+ 'IO',
+ 'IOBuffer',
+ 'IOContext',
+ 'IOStream',
+ 'IdDict',
+ 'IndexCartesian',
+ 'IndexLinear',
+ 'IndexStyle',
+ 'InexactError',
+ 'InitError',
+ 'Int',
+ 'Int128',
+ 'Int16',
+ 'Int32',
+ 'Int64',
+ 'Int8',
+ 'Integer',
+ 'InterruptException',
+ 'InvalidStateException',
+ 'Irrational',
+ 'KeyError',
+ 'LinRange',
+ 'LineNumberNode',
+ 'LinearIndices',
+ 'LoadError',
+ 'MIME',
+ 'Matrix',
+ 'Method',
+ 'MethodError',
+ 'Missing',
+ 'MissingException',
+ 'Module',
+ 'NTuple',
+ 'NamedTuple',
+ 'Nothing',
+ 'Number',
+ 'OrdinalRange',
+ 'OutOfMemoryError',
+ 'OverflowError',
+ 'Pair',
+ 'PartialQuickSort',
+ 'PermutedDimsArray',
+ 'Pipe',
+ 'ProcessFailedException',
+ 'Ptr',
+ 'QuoteNode',
+ 'Rational',
+ 'RawFD',
+ 'ReadOnlyMemoryError',
+ 'Real',
+ 'ReentrantLock',
+ 'Ref',
+ 'Regex',
+ 'RegexMatch',
+ 'RoundingMode',
+ 'SegmentationFault',
+ 'Set',
+ 'Signed',
+ 'Some',
+ 'StackOverflowError',
+ 'StepRange',
+ 'StepRangeLen',
+ 'StridedArray',
+ 'StridedMatrix',
+ 'StridedVecOrMat',
+ 'StridedVector',
+ 'String',
+ 'StringIndexError',
+ 'SubArray',
+ 'SubString',
+ 'SubstitutionString',
+ 'Symbol',
+ 'SystemError',
+ 'Task',
+ 'TaskFailedException',
+ 'Text',
+ 'TextDisplay',
+ 'Timer',
+ 'Tuple',
+ 'Type',
+ 'TypeError',
+ 'TypeVar',
+ 'UInt',
+ 'UInt128',
+ 'UInt16',
+ 'UInt32',
+ 'UInt64',
+ 'UInt8',
+ 'UndefInitializer',
+ 'UndefKeywordError',
+ 'UndefRefError',
+ 'UndefVarError',
+ 'Union',
+ 'UnionAll',
+ 'UnitRange',
+ 'Unsigned',
+ 'Val',
+ 'Vararg',
+ 'VecElement',
+ 'VecOrMat',
+ 'Vector',
+ 'VersionNumber',
+ 'WeakKeyDict',
+ 'WeakRef',
+)
+
+# Generated with the following in Julia v1.6.0-rc1
+'''
+#!/usr/bin/env julia
+
+import REPL.REPLCompletions
+res = String["true", "false"]
+for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
+ REPLCompletions.completions("", 0)[1])
+ try
+ v = eval(Symbol(compl.mod))
+ if !(v isa Function || v isa Type || v isa TypeVar || v isa Module || v isa Colon)
+ push!(res, compl.mod)
+ end
+ catch e
+ end
+end
+sort!(unique!(res))
+foreach(x -> println("\'", x, "\',"), res)
+'''
+LITERAL_LIST = (
+ 'ARGS',
+ 'C_NULL',
+ 'DEPOT_PATH',
+ 'ENDIAN_BOM',
+ 'ENV',
+ 'Inf',
+ 'Inf16',
+ 'Inf32',
+ 'Inf64',
+ 'InsertionSort',
+ 'LOAD_PATH',
+ 'MergeSort',
+ 'NaN',
+ 'NaN16',
+ 'NaN32',
+ 'NaN64',
+ 'PROGRAM_FILE',
+ 'QuickSort',
+ 'RoundDown',
+ 'RoundFromZero',
+ 'RoundNearest',
+ 'RoundNearestTiesAway',
+ 'RoundNearestTiesUp',
+ 'RoundToZero',
+ 'RoundUp',
+ 'VERSION',
+ 'devnull',
+ 'false',
+ 'im',
+ 'missing',
+ 'nothing',
+ 'pi',
+ 'stderr',
+ 'stdin',
+ 'stdout',
+ 'true',
+ 'undef',
+ 'π',
+ 'ℯ',
+)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
index 8fd0ff1be2..d79af5a93a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
@@ -4,7 +4,7 @@
Built-in Lasso types, traits, methods, and members.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
index f6a9b796ee..34dcd10925 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
@@ -8,7 +8,7 @@
Do not edit the MODULES dict by hand.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -285,7 +285,7 @@ if __name__ == '__main__': # pragma: no cover
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
- modules = {k: tuple(v) for k, v in modules.items()}
+ modules = {k: tuple(v) for k, v in modules.items()}
regenerate(__file__, modules)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mapping.py b/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
index a7b1b2d96a..8d81d2789d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
@@ -14,21 +14,21 @@
LEXERS = {
'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
- 'AMDGPULexer': ('pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
- 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
+ 'AMDGPULexer': ('pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
+ 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
- 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
- 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
+ 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
+ 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
- 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
+ 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
- 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
+ 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
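For context on this hunk: each LEXERS value is a 5-tuple of (module path, display name, aliases, filename globs, mimetypes) that pygments.lexers resolves lazily. Below is a minimal sketch of that lookup, assuming only the tuple layout visible here; the real resolution, including plugin lexers and caching, lives in pygments/lexers/__init__.py (e.g. get_lexer_by_name).

# Rough sketch of how the LEXERS 5-tuples are typically resolved; illustrative only.
from importlib import import_module

from pygments.lexers._mapping import LEXERS  # {class name: (module, name, aliases, filenames, mimetypes)}

def find_lexer_class(alias):
    """Return the lexer class registered under the given alias, or None."""
    for class_name, (module_name, _name, aliases, _filenames, _mimetypes) in LEXERS.items():
        if alias in aliases:
            module = import_module(module_name)   # e.g. 'pygments.lexers.julia'
            return getattr(module, class_name)    # e.g. JuliaLexer
    return None

# Example: find_lexer_class('julia') returns pygments.lexers.julia.JuliaLexer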
@@ -40,26 +40,26 @@ LEXERS = {
'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
- 'ArrowLexer': ('pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
- 'AscLexer': ('pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
+ 'ArrowLexer': ('pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
+ 'AscLexer': ('pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
- 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
+ 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
+ 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
- 'BareLexer': ('pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
+ 'BareLexer': ('pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
- 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+ 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BddLexer': ('pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
- 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
+ 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
@@ -80,14 +80,14 @@ LEXERS = {
'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
- 'CddlLexer': ('pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
+ 'CddlLexer': ('pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
- 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
+ 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
- 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
+ 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
@@ -97,7 +97,7 @@ LEXERS = {
'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
+ 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
@@ -114,7 +114,7 @@ LEXERS = {
'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
- 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
+ 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
@@ -127,9 +127,9 @@ LEXERS = {
'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
- 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
+ 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
- 'DevicetreeLexer': ('pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
+ 'DevicetreeLexer': ('pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
@@ -146,21 +146,21 @@ LEXERS = {
'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
- 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
+ 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
'ElpiLexer': ('pygments.lexers.elpi', 'Elpi', ('elpi',), ('*.elpi',), ('text/x-elpi',)),
- 'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
- 'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
+ 'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
+ 'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
- 'ExeclineLexer': ('pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
+ 'ExeclineLexer': ('pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
- 'FStarLexer': ('pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
+ 'FStarLexer': ('pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
@@ -174,24 +174,24 @@ LEXERS = {
'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
'FreeFemLexer': ('pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
- 'FutharkLexer': ('pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
+ 'FutharkLexer': ('pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
- 'GDScriptLexer': ('pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
+ 'GDScriptLexer': ('pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
- 'GSQLLexer': ('pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
+ 'GSQLLexer': ('pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
- 'GcodeLexer': ('pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
+ 'GcodeLexer': ('pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
- 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
+ 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
- 'GoLexer': ('pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
+ 'GoLexer': ('pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
- 'GraphvizLexer': ('pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')),
+ 'GraphvizLexer': ('pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')),
'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')),
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
@@ -199,7 +199,7 @@ LEXERS = {
'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
@@ -225,81 +225,81 @@ LEXERS = {
'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
- 'JSLTLexer': ('pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
+ 'JSLTLexer': ('pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
- 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
- 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
+ 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
+ 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
- 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
- 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
+ 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
+ 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
+ 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
- 'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', (), (), ()),
+ 'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', (), (), ()),
'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
- 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
+ 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
- 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
+ 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
- 'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
+ 'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
- 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
- 'KernelLogLexer': ('pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
+ 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
+ 'KernelLogLexer': ('pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
- 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
- 'KuinLexer': ('pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
+ 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
+ 'KuinLexer': ('pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
- 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
+ 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
- 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
+ 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
'LilyPondLexer': ('pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
- 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
- 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
- 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
- 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
+ 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
+ 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
+ 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
+ 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
- 'LlvmMirBodyLexer': ('pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
- 'LlvmMirLexer': ('pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
+ 'LlvmMirBodyLexer': ('pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
+ 'LlvmMirLexer': ('pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
+ 'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
- 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
+ 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
- 'MarkdownLexer': ('pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
+ 'MarkdownLexer': ('pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
'MaximaLexer': ('pygments.lexers.maxima', 'Maxima', ('maxima', 'macsyma'), ('*.mac', '*.max'), ()),
- 'MesonLexer': ('pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
+ 'MesonLexer': ('pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
- 'MiniScriptLexer': ('pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
+ 'MiniScriptLexer': ('pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
- 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
- 'MoselLexer': ('pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
+ 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
+ 'MoselLexer': ('pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
@@ -312,7 +312,7 @@ LEXERS = {
'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
- 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
+ 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NCLLexer': ('pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
@@ -321,15 +321,15 @@ LEXERS = {
'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
- 'NestedTextLexer': ('pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
+ 'NestedTextLexer': ('pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
+ 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
- 'NodeConsoleLexer': ('pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
- 'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
+ 'NodeConsoleLexer': ('pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
+ 'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
@@ -339,51 +339,51 @@ LEXERS = {
'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
- 'OmgIdlLexer': ('pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
+ 'OmgIdlLexer': ('pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
- 'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), (), ()),
+ 'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), (), ()),
'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
- 'PegLexer': ('pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
- 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
- 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
+ 'PegLexer': ('pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
+ 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
+ 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
- 'PointlessLexer': ('pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
+ 'PointlessLexer': ('pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
- 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
- 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
+ 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
+ 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
- 'ProcfileLexer': ('pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
+ 'ProcfileLexer': ('pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
- 'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
+ 'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
- 'PsyshConsoleLexer': ('pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
+ 'PsyshConsoleLexer': ('pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
- 'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
- 'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
+ 'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
+ 'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
- 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
+ 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
+ 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
- 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
+ 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
@@ -394,28 +394,28 @@ LEXERS = {
'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
- 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
+ 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
- 'ReasonLexer': ('pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
+ 'ReasonLexer': ('pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
- 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
+ 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
- 'RideLexer': ('pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
+ 'RideLexer': ('pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
'RitaLexer': ('pygments.lexers.rita', 'Rita', ('rita',), ('*.rita',), ('text/rita',)),
'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
- 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
+ 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
- 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
- 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
+ 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
+ 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
+ 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
@@ -424,30 +424,30 @@ LEXERS = {
'SaviLexer': ('pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()),
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
- 'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
+ 'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'SedLexer': ('pygments.lexers.textedit', 'Sed', ('sed', 'gsed', 'ssed'), ('*.sed', '*.[gs]sed'), ('text/x-sed',)),
- 'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
+ 'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
- 'SieveLexer': ('pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()),
+ 'SieveLexer': ('pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()),
'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
- 'SingularityLexer': ('pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()),
- 'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()),
+ 'SingularityLexer': ('pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()),
+ 'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()),
'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
- 'SmithyLexer': ('pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
+ 'SmithyLexer': ('pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
- 'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
+ 'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
'SophiaLexer': ('pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
+ 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
'SpiceLexer': ('pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)),
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
@@ -457,43 +457,43 @@ LEXERS = {
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
- 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
+ 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
- 'TNTLexer': ('pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
- 'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()),
+ 'TNTLexer': ('pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
+ 'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()),
'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
- 'TealLexer': ('pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
- 'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
+ 'TealLexer': ('pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
+ 'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
- 'ThingsDBLexer': ('pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
+ 'ThingsDBLexer': ('pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
- 'TiddlyWiki5Lexer': ('pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
+ 'TiddlyWiki5Lexer': ('pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
- 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
+ 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
- 'UsdLexer': ('pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()),
+ 'UsdLexer': ('pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()),
'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()),
'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
@@ -509,13 +509,13 @@ LEXERS = {
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
- 'WatLexer': ('pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
- 'WebIDLLexer': ('pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
+ 'WatLexer': ('pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
+ 'WebIDLLexer': ('pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
- 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
+ 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
@@ -525,11 +525,11 @@ LEXERS = {
'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
- 'YangLexer': ('pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)),
- 'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
+ 'YangLexer': ('pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)),
+ 'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
- 'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
- 'apdlexer': ('pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
+ 'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
+ 'apdlexer': ('pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
}
if __name__ == '__main__': # pragma: no cover
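Note on the mapping above (editorial aside, not part of the diff): each LEXERS entry is a 5-tuple of (module path, display name, alias names, filename globs, MIME types), and the removed/re-added pairs differ only in authorship/whitespace metadata, not in content. A minimal sketch of how such an entry is typically resolved at runtime, assuming the standard Pygments 2.x lookup helpers rather than anything introduced by this commit:

    # Sketch only; assumes the stock pygments.lexers helpers, not code from this diff.
    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    lexer = get_lexer_by_name('python3')            # matched against the alias tuple of 'PythonLexer'
    same = get_lexer_for_filename('SConstruct')     # matched against the filename globs of the same entry
    print(lexer.name, same.name)                    # both resolve to the 'Python' display name

Both helpers consult this generated table first and only import the lexer module lazily once a match is found, which is why the table stores dotted module paths rather than classes.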
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
index 5cc50a940b..03a5286b1f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
@@ -4,7 +4,7 @@
Builtins for the MqlLexer.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
types = (
@@ -883,7 +883,7 @@ constants = (
'PERIOD_W1',
'POINTER_AUTOMATIC',
'POINTER_DYNAMIC',
- 'POINTER_INVALID',
+ 'POINTER_INVALID',
'PRICE_CLOSE',
'PRICE_HIGH',
'PRICE_LOW',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py
index dfc82bccea..4bc640be35 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py
@@ -1,1281 +1,1281 @@
-"""
- pygments.lexers._mysql_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Self-updating data files for the MySQL lexer.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-MYSQL_CONSTANTS = (
- 'false',
- 'null',
- 'true',
- 'unknown',
-)
-
-
-# At this time, no easily-parsed, definitive list of data types
-# has been found in the MySQL source code or documentation. (The
-# `sql/sql_yacc.yy` file is definitive but is difficult to parse.)
-# Therefore these types are currently maintained manually.
-#
-# Some words in this list -- like "long", "national", "precision",
-# and "varying" -- appear to only occur in combination with other
-# data type keywords. Therefore they are included as separate words
-# even though they do not naturally occur in syntax separately.
-#
-# This list is also used to strip data types out of the list of
-# MySQL keywords, which is automatically updated later in the file.
-#
-MYSQL_DATATYPES = (
- # Numeric data types
- 'bigint',
- 'bit',
- 'bool',
- 'boolean',
- 'dec',
- 'decimal',
- 'double',
- 'fixed',
- 'float',
- 'float4',
- 'float8',
- 'int',
- 'int1',
- 'int2',
- 'int3',
- 'int4',
- 'int8',
- 'integer',
- 'mediumint',
- 'middleint',
- 'numeric',
- 'precision',
- 'real',
- 'serial',
- 'smallint',
- 'tinyint',
-
- # Date and time data types
- 'date',
- 'datetime',
- 'time',
- 'timestamp',
- 'year',
-
- # String data types
- 'binary',
- 'blob',
- 'char',
- 'enum',
- 'long',
- 'longblob',
- 'longtext',
- 'mediumblob',
- 'mediumtext',
- 'national',
- 'nchar',
- 'nvarchar',
- 'set',
- 'text',
- 'tinyblob',
- 'tinytext',
- 'varbinary',
- 'varchar',
- 'varcharacter',
- 'varying',
-
- # Spatial data types
- 'geometry',
- 'geometrycollection',
- 'linestring',
- 'multilinestring',
- 'multipoint',
- 'multipolygon',
- 'point',
- 'polygon',
-
- # JSON data types
- 'json',
-)
-
-# Everything below this line is auto-generated from the MySQL source code.
-# Run this file in Python and it will update itself.
-# -----------------------------------------------------------------------------
-
-MYSQL_FUNCTIONS = (
- 'abs',
- 'acos',
- 'adddate',
- 'addtime',
- 'aes_decrypt',
- 'aes_encrypt',
- 'any_value',
- 'asin',
- 'atan',
- 'atan2',
- 'benchmark',
- 'bin',
- 'bin_to_uuid',
- 'bit_and',
- 'bit_count',
- 'bit_length',
- 'bit_or',
- 'bit_xor',
- 'can_access_column',
- 'can_access_database',
- 'can_access_event',
- 'can_access_resource_group',
- 'can_access_routine',
- 'can_access_table',
- 'can_access_trigger',
- 'can_access_view',
- 'cast',
- 'ceil',
- 'ceiling',
- 'char_length',
- 'character_length',
- 'coercibility',
- 'compress',
- 'concat',
- 'concat_ws',
- 'connection_id',
- 'conv',
- 'convert_cpu_id_mask',
- 'convert_interval_to_user_interval',
- 'convert_tz',
- 'cos',
- 'cot',
- 'count',
- 'crc32',
- 'curdate',
- 'current_role',
- 'curtime',
- 'date_add',
- 'date_format',
- 'date_sub',
- 'datediff',
- 'dayname',
- 'dayofmonth',
- 'dayofweek',
- 'dayofyear',
- 'degrees',
- 'elt',
- 'exp',
- 'export_set',
- 'extract',
- 'extractvalue',
- 'field',
- 'find_in_set',
- 'floor',
- 'format_bytes',
- 'format_pico_time',
- 'found_rows',
- 'from_base64',
- 'from_days',
- 'from_unixtime',
- 'get_dd_column_privileges',
- 'get_dd_create_options',
- 'get_dd_index_private_data',
- 'get_dd_index_sub_part_length',
- 'get_dd_property_key_value',
- 'get_dd_tablespace_private_data',
- 'get_lock',
- 'greatest',
- 'group_concat',
- 'gtid_subset',
- 'gtid_subtract',
- 'hex',
- 'icu_version',
- 'ifnull',
- 'inet6_aton',
- 'inet6_ntoa',
- 'inet_aton',
- 'inet_ntoa',
- 'instr',
- 'internal_auto_increment',
- 'internal_avg_row_length',
- 'internal_check_time',
- 'internal_checksum',
- 'internal_data_free',
- 'internal_data_length',
- 'internal_dd_char_length',
- 'internal_get_comment_or_error',
- 'internal_get_dd_column_extra',
- 'internal_get_enabled_role_json',
- 'internal_get_hostname',
- 'internal_get_mandatory_roles_json',
- 'internal_get_partition_nodegroup',
- 'internal_get_username',
- 'internal_get_view_warning_or_error',
- 'internal_index_column_cardinality',
- 'internal_index_length',
- 'internal_is_enabled_role',
- 'internal_is_mandatory_role',
- 'internal_keys_disabled',
- 'internal_max_data_length',
- 'internal_table_rows',
- 'internal_tablespace_autoextend_size',
- 'internal_tablespace_data_free',
- 'internal_tablespace_extent_size',
- 'internal_tablespace_extra',
- 'internal_tablespace_free_extents',
- 'internal_tablespace_id',
- 'internal_tablespace_initial_size',
- 'internal_tablespace_logfile_group_name',
- 'internal_tablespace_logfile_group_number',
- 'internal_tablespace_maximum_size',
- 'internal_tablespace_row_format',
- 'internal_tablespace_status',
- 'internal_tablespace_total_extents',
- 'internal_tablespace_type',
- 'internal_tablespace_version',
- 'internal_update_time',
- 'is_free_lock',
- 'is_ipv4',
- 'is_ipv4_compat',
- 'is_ipv4_mapped',
- 'is_ipv6',
- 'is_used_lock',
- 'is_uuid',
- 'is_visible_dd_object',
- 'isnull',
- 'json_array',
- 'json_array_append',
- 'json_array_insert',
- 'json_arrayagg',
- 'json_contains',
- 'json_contains_path',
- 'json_depth',
- 'json_extract',
- 'json_insert',
- 'json_keys',
- 'json_length',
- 'json_merge',
- 'json_merge_patch',
- 'json_merge_preserve',
- 'json_object',
- 'json_objectagg',
- 'json_overlaps',
- 'json_pretty',
- 'json_quote',
- 'json_remove',
- 'json_replace',
- 'json_schema_valid',
- 'json_schema_validation_report',
- 'json_search',
- 'json_set',
- 'json_storage_free',
- 'json_storage_size',
- 'json_type',
- 'json_unquote',
- 'json_valid',
- 'last_day',
- 'last_insert_id',
- 'lcase',
- 'least',
- 'length',
- 'like_range_max',
- 'like_range_min',
- 'ln',
- 'load_file',
- 'locate',
- 'log',
- 'log10',
- 'log2',
- 'lower',
- 'lpad',
- 'ltrim',
- 'make_set',
- 'makedate',
- 'maketime',
- 'master_pos_wait',
- 'max',
- 'mbrcontains',
- 'mbrcoveredby',
- 'mbrcovers',
- 'mbrdisjoint',
- 'mbrequals',
- 'mbrintersects',
- 'mbroverlaps',
- 'mbrtouches',
- 'mbrwithin',
- 'md5',
- 'mid',
- 'min',
- 'monthname',
- 'name_const',
- 'now',
- 'nullif',
- 'oct',
- 'octet_length',
- 'ord',
- 'period_add',
- 'period_diff',
- 'pi',
- 'position',
- 'pow',
- 'power',
- 'ps_current_thread_id',
- 'ps_thread_id',
- 'quote',
- 'radians',
- 'rand',
- 'random_bytes',
- 'regexp_instr',
- 'regexp_like',
- 'regexp_replace',
- 'regexp_substr',
- 'release_all_locks',
- 'release_lock',
- 'remove_dd_property_key',
- 'reverse',
- 'roles_graphml',
- 'round',
- 'rpad',
- 'rtrim',
- 'sec_to_time',
- 'session_user',
- 'sha',
- 'sha1',
- 'sha2',
- 'sign',
- 'sin',
- 'sleep',
- 'soundex',
- 'space',
- 'sqrt',
- 'st_area',
- 'st_asbinary',
- 'st_asgeojson',
- 'st_astext',
- 'st_aswkb',
- 'st_aswkt',
- 'st_buffer',
- 'st_buffer_strategy',
- 'st_centroid',
- 'st_contains',
- 'st_convexhull',
- 'st_crosses',
- 'st_difference',
- 'st_dimension',
- 'st_disjoint',
- 'st_distance',
- 'st_distance_sphere',
- 'st_endpoint',
- 'st_envelope',
- 'st_equals',
- 'st_exteriorring',
- 'st_geohash',
- 'st_geomcollfromtext',
- 'st_geomcollfromtxt',
- 'st_geomcollfromwkb',
- 'st_geometrycollectionfromtext',
- 'st_geometrycollectionfromwkb',
- 'st_geometryfromtext',
- 'st_geometryfromwkb',
- 'st_geometryn',
- 'st_geometrytype',
- 'st_geomfromgeojson',
- 'st_geomfromtext',
- 'st_geomfromwkb',
- 'st_interiorringn',
- 'st_intersection',
- 'st_intersects',
- 'st_isclosed',
- 'st_isempty',
- 'st_issimple',
- 'st_isvalid',
- 'st_latfromgeohash',
- 'st_latitude',
- 'st_length',
- 'st_linefromtext',
- 'st_linefromwkb',
- 'st_linestringfromtext',
- 'st_linestringfromwkb',
- 'st_longfromgeohash',
- 'st_longitude',
- 'st_makeenvelope',
- 'st_mlinefromtext',
- 'st_mlinefromwkb',
- 'st_mpointfromtext',
- 'st_mpointfromwkb',
- 'st_mpolyfromtext',
- 'st_mpolyfromwkb',
- 'st_multilinestringfromtext',
- 'st_multilinestringfromwkb',
- 'st_multipointfromtext',
- 'st_multipointfromwkb',
- 'st_multipolygonfromtext',
- 'st_multipolygonfromwkb',
- 'st_numgeometries',
- 'st_numinteriorring',
- 'st_numinteriorrings',
- 'st_numpoints',
- 'st_overlaps',
- 'st_pointfromgeohash',
- 'st_pointfromtext',
- 'st_pointfromwkb',
- 'st_pointn',
- 'st_polyfromtext',
- 'st_polyfromwkb',
- 'st_polygonfromtext',
- 'st_polygonfromwkb',
- 'st_simplify',
- 'st_srid',
- 'st_startpoint',
- 'st_swapxy',
- 'st_symdifference',
- 'st_touches',
- 'st_transform',
- 'st_union',
- 'st_validate',
- 'st_within',
- 'st_x',
- 'st_y',
- 'statement_digest',
- 'statement_digest_text',
- 'std',
- 'stddev',
- 'stddev_pop',
- 'stddev_samp',
- 'str_to_date',
- 'strcmp',
- 'subdate',
- 'substr',
- 'substring',
- 'substring_index',
- 'subtime',
- 'sum',
- 'sysdate',
- 'system_user',
- 'tan',
- 'time_format',
- 'time_to_sec',
- 'timediff',
- 'to_base64',
- 'to_days',
- 'to_seconds',
- 'trim',
- 'ucase',
- 'uncompress',
- 'uncompressed_length',
- 'unhex',
- 'unix_timestamp',
- 'updatexml',
- 'upper',
- 'uuid',
- 'uuid_short',
- 'uuid_to_bin',
- 'validate_password_strength',
- 'var_pop',
- 'var_samp',
- 'variance',
- 'version',
- 'wait_for_executed_gtid_set',
- 'wait_until_sql_thread_after_gtids',
- 'weekday',
- 'weekofyear',
- 'yearweek',
-)
-
-
-MYSQL_OPTIMIZER_HINTS = (
- 'bka',
- 'bnl',
- 'dupsweedout',
- 'firstmatch',
- 'group_index',
- 'hash_join',
- 'index',
- 'index_merge',
- 'intoexists',
- 'join_fixed_order',
- 'join_index',
- 'join_order',
- 'join_prefix',
- 'join_suffix',
- 'loosescan',
- 'materialization',
- 'max_execution_time',
- 'merge',
- 'mrr',
- 'no_bka',
- 'no_bnl',
- 'no_group_index',
- 'no_hash_join',
- 'no_icp',
- 'no_index',
- 'no_index_merge',
- 'no_join_index',
- 'no_merge',
- 'no_mrr',
- 'no_order_index',
- 'no_range_optimization',
- 'no_semijoin',
- 'no_skip_scan',
- 'order_index',
- 'qb_name',
- 'resource_group',
- 'semijoin',
- 'set_var',
- 'skip_scan',
- 'subquery',
-)
-
-
-MYSQL_KEYWORDS = (
- 'accessible',
- 'account',
- 'action',
- 'active',
- 'add',
- 'admin',
- 'after',
- 'against',
- 'aggregate',
- 'algorithm',
- 'all',
- 'alter',
- 'always',
- 'analyze',
- 'and',
- 'any',
- 'array',
- 'as',
- 'asc',
- 'ascii',
- 'asensitive',
- 'at',
- 'attribute',
- 'auto_increment',
- 'autoextend_size',
- 'avg',
- 'avg_row_length',
- 'backup',
- 'before',
- 'begin',
- 'between',
- 'binlog',
- 'block',
- 'both',
- 'btree',
- 'buckets',
- 'by',
- 'byte',
- 'cache',
- 'call',
- 'cascade',
- 'cascaded',
- 'case',
- 'catalog_name',
- 'chain',
- 'change',
- 'changed',
- 'channel',
- 'character',
- 'charset',
- 'check',
- 'checksum',
- 'cipher',
- 'class_origin',
- 'client',
- 'clone',
- 'close',
- 'coalesce',
- 'code',
- 'collate',
- 'collation',
- 'column',
- 'column_format',
- 'column_name',
- 'columns',
- 'comment',
- 'commit',
- 'committed',
- 'compact',
- 'completion',
- 'component',
- 'compressed',
- 'compression',
- 'concurrent',
- 'condition',
- 'connection',
- 'consistent',
- 'constraint',
- 'constraint_catalog',
- 'constraint_name',
- 'constraint_schema',
- 'contains',
- 'context',
- 'continue',
- 'convert',
- 'cpu',
- 'create',
- 'cross',
- 'cube',
- 'cume_dist',
- 'current',
- 'current_date',
- 'current_time',
- 'current_timestamp',
- 'current_user',
- 'cursor',
- 'cursor_name',
- 'data',
- 'database',
- 'databases',
- 'datafile',
- 'day',
- 'day_hour',
- 'day_microsecond',
- 'day_minute',
- 'day_second',
- 'deallocate',
- 'declare',
- 'default',
- 'default_auth',
- 'definer',
- 'definition',
- 'delay_key_write',
- 'delayed',
- 'delete',
- 'dense_rank',
- 'desc',
- 'describe',
- 'description',
- 'deterministic',
- 'diagnostics',
- 'directory',
- 'disable',
- 'discard',
- 'disk',
- 'distinct',
- 'distinctrow',
- 'div',
- 'do',
- 'drop',
- 'dual',
- 'dumpfile',
- 'duplicate',
- 'dynamic',
- 'each',
- 'else',
- 'elseif',
- 'empty',
- 'enable',
- 'enclosed',
- 'encryption',
- 'end',
- 'ends',
- 'enforced',
- 'engine',
- 'engine_attribute',
- 'engines',
- 'error',
- 'errors',
- 'escape',
- 'escaped',
- 'event',
- 'events',
- 'every',
- 'except',
- 'exchange',
- 'exclude',
- 'execute',
- 'exists',
- 'exit',
- 'expansion',
- 'expire',
- 'explain',
- 'export',
- 'extended',
- 'extent_size',
- 'failed_login_attempts',
- 'false',
- 'fast',
- 'faults',
- 'fetch',
- 'fields',
- 'file',
- 'file_block_size',
- 'filter',
- 'first',
- 'first_value',
- 'flush',
- 'following',
- 'follows',
- 'for',
- 'force',
- 'foreign',
- 'format',
- 'found',
- 'from',
- 'full',
- 'fulltext',
- 'function',
- 'general',
- 'generated',
- 'geomcollection',
- 'get',
- 'get_format',
- 'get_master_public_key',
- 'global',
- 'grant',
- 'grants',
- 'group',
- 'group_replication',
- 'grouping',
- 'groups',
- 'handler',
- 'hash',
- 'having',
- 'help',
- 'high_priority',
- 'histogram',
- 'history',
- 'host',
- 'hosts',
- 'hour',
- 'hour_microsecond',
- 'hour_minute',
- 'hour_second',
- 'identified',
- 'if',
- 'ignore',
- 'ignore_server_ids',
- 'import',
- 'in',
- 'inactive',
- 'index',
- 'indexes',
- 'infile',
- 'initial_size',
- 'inner',
- 'inout',
- 'insensitive',
- 'insert',
- 'insert_method',
- 'install',
- 'instance',
- 'interval',
- 'into',
- 'invisible',
- 'invoker',
- 'io',
- 'io_after_gtids',
- 'io_before_gtids',
- 'io_thread',
- 'ipc',
- 'is',
- 'isolation',
- 'issuer',
- 'iterate',
- 'join',
- 'json_table',
- 'json_value',
- 'key',
- 'key_block_size',
- 'keys',
- 'kill',
- 'lag',
- 'language',
- 'last',
- 'last_value',
- 'lateral',
- 'lead',
- 'leading',
- 'leave',
- 'leaves',
- 'left',
- 'less',
- 'level',
- 'like',
- 'limit',
- 'linear',
- 'lines',
- 'list',
- 'load',
- 'local',
- 'localtime',
- 'localtimestamp',
- 'lock',
- 'locked',
- 'locks',
- 'logfile',
- 'logs',
- 'loop',
- 'low_priority',
- 'master',
- 'master_auto_position',
- 'master_bind',
- 'master_compression_algorithms',
- 'master_connect_retry',
- 'master_delay',
- 'master_heartbeat_period',
- 'master_host',
- 'master_log_file',
- 'master_log_pos',
- 'master_password',
- 'master_port',
- 'master_public_key_path',
- 'master_retry_count',
- 'master_server_id',
- 'master_ssl',
- 'master_ssl_ca',
- 'master_ssl_capath',
- 'master_ssl_cert',
- 'master_ssl_cipher',
- 'master_ssl_crl',
- 'master_ssl_crlpath',
- 'master_ssl_key',
- 'master_ssl_verify_server_cert',
- 'master_tls_ciphersuites',
- 'master_tls_version',
- 'master_user',
- 'master_zstd_compression_level',
- 'match',
- 'max_connections_per_hour',
- 'max_queries_per_hour',
- 'max_rows',
- 'max_size',
- 'max_updates_per_hour',
- 'max_user_connections',
- 'maxvalue',
- 'medium',
- 'member',
- 'memory',
- 'merge',
- 'message_text',
- 'microsecond',
- 'migrate',
- 'min_rows',
- 'minute',
- 'minute_microsecond',
- 'minute_second',
- 'mod',
- 'mode',
- 'modifies',
- 'modify',
- 'month',
- 'mutex',
- 'mysql_errno',
- 'name',
- 'names',
- 'natural',
- 'ndb',
- 'ndbcluster',
- 'nested',
- 'network_namespace',
- 'never',
- 'new',
- 'next',
- 'no',
- 'no_wait',
- 'no_write_to_binlog',
- 'nodegroup',
- 'none',
- 'not',
- 'nowait',
- 'nth_value',
- 'ntile',
- 'null',
- 'nulls',
- 'number',
- 'of',
- 'off',
- 'offset',
- 'oj',
- 'old',
- 'on',
- 'one',
- 'only',
- 'open',
- 'optimize',
- 'optimizer_costs',
- 'option',
- 'optional',
- 'optionally',
- 'options',
- 'or',
- 'order',
- 'ordinality',
- 'organization',
- 'others',
- 'out',
- 'outer',
- 'outfile',
- 'over',
- 'owner',
- 'pack_keys',
- 'page',
- 'parser',
- 'partial',
- 'partition',
- 'partitioning',
- 'partitions',
- 'password',
- 'password_lock_time',
- 'path',
- 'percent_rank',
- 'persist',
- 'persist_only',
- 'phase',
- 'plugin',
- 'plugin_dir',
- 'plugins',
- 'port',
- 'precedes',
- 'preceding',
- 'prepare',
- 'preserve',
- 'prev',
- 'primary',
- 'privilege_checks_user',
- 'privileges',
- 'procedure',
- 'process',
- 'processlist',
- 'profile',
- 'profiles',
- 'proxy',
- 'purge',
- 'quarter',
- 'query',
- 'quick',
- 'random',
- 'range',
- 'rank',
- 'read',
- 'read_only',
- 'read_write',
- 'reads',
- 'rebuild',
- 'recover',
- 'recursive',
- 'redo_buffer_size',
- 'redundant',
- 'reference',
- 'references',
- 'regexp',
- 'relay',
- 'relay_log_file',
- 'relay_log_pos',
- 'relay_thread',
- 'relaylog',
- 'release',
- 'reload',
- 'remove',
- 'rename',
- 'reorganize',
- 'repair',
- 'repeat',
- 'repeatable',
- 'replace',
- 'replicate_do_db',
- 'replicate_do_table',
- 'replicate_ignore_db',
- 'replicate_ignore_table',
- 'replicate_rewrite_db',
- 'replicate_wild_do_table',
- 'replicate_wild_ignore_table',
- 'replication',
- 'require',
- 'require_row_format',
- 'require_table_primary_key_check',
- 'reset',
- 'resignal',
- 'resource',
- 'respect',
- 'restart',
- 'restore',
- 'restrict',
- 'resume',
- 'retain',
- 'return',
- 'returned_sqlstate',
- 'returning',
- 'returns',
- 'reuse',
- 'reverse',
- 'revoke',
- 'right',
- 'rlike',
- 'role',
- 'rollback',
- 'rollup',
- 'rotate',
- 'routine',
- 'row',
- 'row_count',
- 'row_format',
- 'row_number',
- 'rows',
- 'rtree',
- 'savepoint',
- 'schedule',
- 'schema',
- 'schema_name',
- 'schemas',
- 'second',
- 'second_microsecond',
- 'secondary',
- 'secondary_engine',
- 'secondary_engine_attribute',
- 'secondary_load',
- 'secondary_unload',
- 'security',
- 'select',
- 'sensitive',
- 'separator',
- 'serializable',
- 'server',
- 'session',
- 'share',
- 'show',
- 'shutdown',
- 'signal',
- 'signed',
- 'simple',
- 'skip',
- 'slave',
- 'slow',
- 'snapshot',
- 'socket',
- 'some',
- 'soname',
- 'sounds',
- 'source',
- 'spatial',
- 'specific',
- 'sql',
- 'sql_after_gtids',
- 'sql_after_mts_gaps',
- 'sql_before_gtids',
- 'sql_big_result',
- 'sql_buffer_result',
- 'sql_calc_found_rows',
- 'sql_no_cache',
- 'sql_small_result',
- 'sql_thread',
- 'sql_tsi_day',
- 'sql_tsi_hour',
- 'sql_tsi_minute',
- 'sql_tsi_month',
- 'sql_tsi_quarter',
- 'sql_tsi_second',
- 'sql_tsi_week',
- 'sql_tsi_year',
- 'sqlexception',
- 'sqlstate',
- 'sqlwarning',
- 'srid',
- 'ssl',
- 'stacked',
- 'start',
- 'starting',
- 'starts',
- 'stats_auto_recalc',
- 'stats_persistent',
- 'stats_sample_pages',
- 'status',
- 'stop',
- 'storage',
- 'stored',
- 'straight_join',
- 'stream',
- 'string',
- 'subclass_origin',
- 'subject',
- 'subpartition',
- 'subpartitions',
- 'super',
- 'suspend',
- 'swaps',
- 'switches',
- 'system',
- 'table',
- 'table_checksum',
- 'table_name',
- 'tables',
- 'tablespace',
- 'temporary',
- 'temptable',
- 'terminated',
- 'than',
- 'then',
- 'thread_priority',
- 'ties',
- 'timestampadd',
- 'timestampdiff',
- 'tls',
- 'to',
- 'trailing',
- 'transaction',
- 'trigger',
- 'triggers',
- 'true',
- 'truncate',
- 'type',
- 'types',
- 'unbounded',
- 'uncommitted',
- 'undefined',
- 'undo',
- 'undo_buffer_size',
- 'undofile',
- 'unicode',
- 'uninstall',
- 'union',
- 'unique',
- 'unknown',
- 'unlock',
- 'unsigned',
- 'until',
- 'update',
- 'upgrade',
- 'usage',
- 'use',
- 'use_frm',
- 'user',
- 'user_resources',
- 'using',
- 'utc_date',
- 'utc_time',
- 'utc_timestamp',
- 'validation',
- 'value',
- 'values',
- 'variables',
- 'vcpu',
- 'view',
- 'virtual',
- 'visible',
- 'wait',
- 'warnings',
- 'week',
- 'weight_string',
- 'when',
- 'where',
- 'while',
- 'window',
- 'with',
- 'without',
- 'work',
- 'wrapper',
- 'write',
- 'x509',
- 'xa',
- 'xid',
- 'xml',
- 'xor',
- 'year_month',
- 'zerofill',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- from urllib.request import urlopen
-
- from pygments.util import format_lines
-
- # MySQL source code
- SOURCE_URL = 'https://github.com/mysql/mysql-server/raw/8.0'
- LEX_URL = SOURCE_URL + '/sql/lex.h'
- ITEM_CREATE_URL = SOURCE_URL + '/sql/item_create.cc'
-
-
- def update_myself():
- # Pull content from lex.h.
- lex_file = urlopen(LEX_URL).read().decode('utf8', errors='ignore')
- keywords = parse_lex_keywords(lex_file)
- functions = parse_lex_functions(lex_file)
- optimizer_hints = parse_lex_optimizer_hints(lex_file)
-
- # Parse content in item_create.cc.
- item_create_file = urlopen(ITEM_CREATE_URL).read().decode('utf8', errors='ignore')
- functions.update(parse_item_create_functions(item_create_file))
-
- # Remove data types from the set of keywords.
- keywords -= set(MYSQL_DATATYPES)
-
- update_content('MYSQL_FUNCTIONS', tuple(sorted(functions)))
- update_content('MYSQL_KEYWORDS', tuple(sorted(keywords)))
- update_content('MYSQL_OPTIMIZER_HINTS', tuple(sorted(optimizer_hints)))
-
-
- def parse_lex_keywords(f):
- """Parse keywords in lex.h."""
-
- results = set()
- for m in re.finditer(r'{SYM(?:_HK)?\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
- results.add(m.group('keyword').lower())
-
- if not results:
- raise ValueError('No keywords found')
-
- return results
-
-
- def parse_lex_optimizer_hints(f):
- """Parse optimizer hints in lex.h."""
-
- results = set()
- for m in re.finditer(r'{SYM_H\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
- results.add(m.group('keyword').lower())
-
- if not results:
- raise ValueError('No optimizer hints found')
-
- return results
-
-
- def parse_lex_functions(f):
- """Parse MySQL function names from lex.h."""
-
- results = set()
- for m in re.finditer(r'{SYM_FN?\("(?P<function>[a-z0-9_]+)",', f, flags=re.I):
- results.add(m.group('function').lower())
-
- if not results:
- raise ValueError('No lex functions found')
-
- return results
-
-
- def parse_item_create_functions(f):
- """Parse MySQL function names from item_create.cc."""
-
- results = set()
- for m in re.finditer(r'{"(?P<function>[^"]+?)",\s*SQL_F[^(]+?\(', f, flags=re.I):
- results.add(m.group('function').lower())
-
- if not results:
- raise ValueError('No item_create functions found')
-
- return results
-
-
- def update_content(field_name, content):
- """Overwrite this file with content parsed from MySQL's source code."""
-
- with open(__file__) as f:
- data = f.read()
-
- # Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % field_name, re.M | re.S)
- m = re_match.search(data)
- if not m:
- raise ValueError('Could not find an existing definition for %s' % field_name)
-
- new_block = format_lines(field_name, content)
- data = data[:m.start()] + new_block + data[m.end():]
-
- with open(__file__, 'w', newline='\n') as f:
- f.write(data)
-
- update_myself()
+"""
+ pygments.lexers._mysql_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Self-updating data files for the MySQL lexer.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+
+MYSQL_CONSTANTS = (
+ 'false',
+ 'null',
+ 'true',
+ 'unknown',
+)
+
+
+# At this time, no easily-parsed, definitive list of data types
+# has been found in the MySQL source code or documentation. (The
+# `sql/sql_yacc.yy` file is definitive but is difficult to parse.)
+# Therefore these types are currently maintained manually.
+#
+# Some words in this list -- like "long", "national", "precision",
+# and "varying" -- appear to only occur in combination with other
+# data type keywords. Therefore they are included as separate words
+# even though they do not naturally occur in syntax separately.
+#
+# This list is also used to strip data types out of the list of
+# MySQL keywords, which is automatically updated later in the file.
+#
+MYSQL_DATATYPES = (
+ # Numeric data types
+ 'bigint',
+ 'bit',
+ 'bool',
+ 'boolean',
+ 'dec',
+ 'decimal',
+ 'double',
+ 'fixed',
+ 'float',
+ 'float4',
+ 'float8',
+ 'int',
+ 'int1',
+ 'int2',
+ 'int3',
+ 'int4',
+ 'int8',
+ 'integer',
+ 'mediumint',
+ 'middleint',
+ 'numeric',
+ 'precision',
+ 'real',
+ 'serial',
+ 'smallint',
+ 'tinyint',
+
+ # Date and time data types
+ 'date',
+ 'datetime',
+ 'time',
+ 'timestamp',
+ 'year',
+
+ # String data types
+ 'binary',
+ 'blob',
+ 'char',
+ 'enum',
+ 'long',
+ 'longblob',
+ 'longtext',
+ 'mediumblob',
+ 'mediumtext',
+ 'national',
+ 'nchar',
+ 'nvarchar',
+ 'set',
+ 'text',
+ 'tinyblob',
+ 'tinytext',
+ 'varbinary',
+ 'varchar',
+ 'varcharacter',
+ 'varying',
+
+ # Spatial data types
+ 'geometry',
+ 'geometrycollection',
+ 'linestring',
+ 'multilinestring',
+ 'multipoint',
+ 'multipolygon',
+ 'point',
+ 'polygon',
+
+ # JSON data types
+ 'json',
+)
+
+# Everything below this line is auto-generated from the MySQL source code.
+# Run this file in Python and it will update itself.
+# -----------------------------------------------------------------------------
+
+MYSQL_FUNCTIONS = (
+ 'abs',
+ 'acos',
+ 'adddate',
+ 'addtime',
+ 'aes_decrypt',
+ 'aes_encrypt',
+ 'any_value',
+ 'asin',
+ 'atan',
+ 'atan2',
+ 'benchmark',
+ 'bin',
+ 'bin_to_uuid',
+ 'bit_and',
+ 'bit_count',
+ 'bit_length',
+ 'bit_or',
+ 'bit_xor',
+ 'can_access_column',
+ 'can_access_database',
+ 'can_access_event',
+ 'can_access_resource_group',
+ 'can_access_routine',
+ 'can_access_table',
+ 'can_access_trigger',
+ 'can_access_view',
+ 'cast',
+ 'ceil',
+ 'ceiling',
+ 'char_length',
+ 'character_length',
+ 'coercibility',
+ 'compress',
+ 'concat',
+ 'concat_ws',
+ 'connection_id',
+ 'conv',
+ 'convert_cpu_id_mask',
+ 'convert_interval_to_user_interval',
+ 'convert_tz',
+ 'cos',
+ 'cot',
+ 'count',
+ 'crc32',
+ 'curdate',
+ 'current_role',
+ 'curtime',
+ 'date_add',
+ 'date_format',
+ 'date_sub',
+ 'datediff',
+ 'dayname',
+ 'dayofmonth',
+ 'dayofweek',
+ 'dayofyear',
+ 'degrees',
+ 'elt',
+ 'exp',
+ 'export_set',
+ 'extract',
+ 'extractvalue',
+ 'field',
+ 'find_in_set',
+ 'floor',
+ 'format_bytes',
+ 'format_pico_time',
+ 'found_rows',
+ 'from_base64',
+ 'from_days',
+ 'from_unixtime',
+ 'get_dd_column_privileges',
+ 'get_dd_create_options',
+ 'get_dd_index_private_data',
+ 'get_dd_index_sub_part_length',
+ 'get_dd_property_key_value',
+ 'get_dd_tablespace_private_data',
+ 'get_lock',
+ 'greatest',
+ 'group_concat',
+ 'gtid_subset',
+ 'gtid_subtract',
+ 'hex',
+ 'icu_version',
+ 'ifnull',
+ 'inet6_aton',
+ 'inet6_ntoa',
+ 'inet_aton',
+ 'inet_ntoa',
+ 'instr',
+ 'internal_auto_increment',
+ 'internal_avg_row_length',
+ 'internal_check_time',
+ 'internal_checksum',
+ 'internal_data_free',
+ 'internal_data_length',
+ 'internal_dd_char_length',
+ 'internal_get_comment_or_error',
+ 'internal_get_dd_column_extra',
+ 'internal_get_enabled_role_json',
+ 'internal_get_hostname',
+ 'internal_get_mandatory_roles_json',
+ 'internal_get_partition_nodegroup',
+ 'internal_get_username',
+ 'internal_get_view_warning_or_error',
+ 'internal_index_column_cardinality',
+ 'internal_index_length',
+ 'internal_is_enabled_role',
+ 'internal_is_mandatory_role',
+ 'internal_keys_disabled',
+ 'internal_max_data_length',
+ 'internal_table_rows',
+ 'internal_tablespace_autoextend_size',
+ 'internal_tablespace_data_free',
+ 'internal_tablespace_extent_size',
+ 'internal_tablespace_extra',
+ 'internal_tablespace_free_extents',
+ 'internal_tablespace_id',
+ 'internal_tablespace_initial_size',
+ 'internal_tablespace_logfile_group_name',
+ 'internal_tablespace_logfile_group_number',
+ 'internal_tablespace_maximum_size',
+ 'internal_tablespace_row_format',
+ 'internal_tablespace_status',
+ 'internal_tablespace_total_extents',
+ 'internal_tablespace_type',
+ 'internal_tablespace_version',
+ 'internal_update_time',
+ 'is_free_lock',
+ 'is_ipv4',
+ 'is_ipv4_compat',
+ 'is_ipv4_mapped',
+ 'is_ipv6',
+ 'is_used_lock',
+ 'is_uuid',
+ 'is_visible_dd_object',
+ 'isnull',
+ 'json_array',
+ 'json_array_append',
+ 'json_array_insert',
+ 'json_arrayagg',
+ 'json_contains',
+ 'json_contains_path',
+ 'json_depth',
+ 'json_extract',
+ 'json_insert',
+ 'json_keys',
+ 'json_length',
+ 'json_merge',
+ 'json_merge_patch',
+ 'json_merge_preserve',
+ 'json_object',
+ 'json_objectagg',
+ 'json_overlaps',
+ 'json_pretty',
+ 'json_quote',
+ 'json_remove',
+ 'json_replace',
+ 'json_schema_valid',
+ 'json_schema_validation_report',
+ 'json_search',
+ 'json_set',
+ 'json_storage_free',
+ 'json_storage_size',
+ 'json_type',
+ 'json_unquote',
+ 'json_valid',
+ 'last_day',
+ 'last_insert_id',
+ 'lcase',
+ 'least',
+ 'length',
+ 'like_range_max',
+ 'like_range_min',
+ 'ln',
+ 'load_file',
+ 'locate',
+ 'log',
+ 'log10',
+ 'log2',
+ 'lower',
+ 'lpad',
+ 'ltrim',
+ 'make_set',
+ 'makedate',
+ 'maketime',
+ 'master_pos_wait',
+ 'max',
+ 'mbrcontains',
+ 'mbrcoveredby',
+ 'mbrcovers',
+ 'mbrdisjoint',
+ 'mbrequals',
+ 'mbrintersects',
+ 'mbroverlaps',
+ 'mbrtouches',
+ 'mbrwithin',
+ 'md5',
+ 'mid',
+ 'min',
+ 'monthname',
+ 'name_const',
+ 'now',
+ 'nullif',
+ 'oct',
+ 'octet_length',
+ 'ord',
+ 'period_add',
+ 'period_diff',
+ 'pi',
+ 'position',
+ 'pow',
+ 'power',
+ 'ps_current_thread_id',
+ 'ps_thread_id',
+ 'quote',
+ 'radians',
+ 'rand',
+ 'random_bytes',
+ 'regexp_instr',
+ 'regexp_like',
+ 'regexp_replace',
+ 'regexp_substr',
+ 'release_all_locks',
+ 'release_lock',
+ 'remove_dd_property_key',
+ 'reverse',
+ 'roles_graphml',
+ 'round',
+ 'rpad',
+ 'rtrim',
+ 'sec_to_time',
+ 'session_user',
+ 'sha',
+ 'sha1',
+ 'sha2',
+ 'sign',
+ 'sin',
+ 'sleep',
+ 'soundex',
+ 'space',
+ 'sqrt',
+ 'st_area',
+ 'st_asbinary',
+ 'st_asgeojson',
+ 'st_astext',
+ 'st_aswkb',
+ 'st_aswkt',
+ 'st_buffer',
+ 'st_buffer_strategy',
+ 'st_centroid',
+ 'st_contains',
+ 'st_convexhull',
+ 'st_crosses',
+ 'st_difference',
+ 'st_dimension',
+ 'st_disjoint',
+ 'st_distance',
+ 'st_distance_sphere',
+ 'st_endpoint',
+ 'st_envelope',
+ 'st_equals',
+ 'st_exteriorring',
+ 'st_geohash',
+ 'st_geomcollfromtext',
+ 'st_geomcollfromtxt',
+ 'st_geomcollfromwkb',
+ 'st_geometrycollectionfromtext',
+ 'st_geometrycollectionfromwkb',
+ 'st_geometryfromtext',
+ 'st_geometryfromwkb',
+ 'st_geometryn',
+ 'st_geometrytype',
+ 'st_geomfromgeojson',
+ 'st_geomfromtext',
+ 'st_geomfromwkb',
+ 'st_interiorringn',
+ 'st_intersection',
+ 'st_intersects',
+ 'st_isclosed',
+ 'st_isempty',
+ 'st_issimple',
+ 'st_isvalid',
+ 'st_latfromgeohash',
+ 'st_latitude',
+ 'st_length',
+ 'st_linefromtext',
+ 'st_linefromwkb',
+ 'st_linestringfromtext',
+ 'st_linestringfromwkb',
+ 'st_longfromgeohash',
+ 'st_longitude',
+ 'st_makeenvelope',
+ 'st_mlinefromtext',
+ 'st_mlinefromwkb',
+ 'st_mpointfromtext',
+ 'st_mpointfromwkb',
+ 'st_mpolyfromtext',
+ 'st_mpolyfromwkb',
+ 'st_multilinestringfromtext',
+ 'st_multilinestringfromwkb',
+ 'st_multipointfromtext',
+ 'st_multipointfromwkb',
+ 'st_multipolygonfromtext',
+ 'st_multipolygonfromwkb',
+ 'st_numgeometries',
+ 'st_numinteriorring',
+ 'st_numinteriorrings',
+ 'st_numpoints',
+ 'st_overlaps',
+ 'st_pointfromgeohash',
+ 'st_pointfromtext',
+ 'st_pointfromwkb',
+ 'st_pointn',
+ 'st_polyfromtext',
+ 'st_polyfromwkb',
+ 'st_polygonfromtext',
+ 'st_polygonfromwkb',
+ 'st_simplify',
+ 'st_srid',
+ 'st_startpoint',
+ 'st_swapxy',
+ 'st_symdifference',
+ 'st_touches',
+ 'st_transform',
+ 'st_union',
+ 'st_validate',
+ 'st_within',
+ 'st_x',
+ 'st_y',
+ 'statement_digest',
+ 'statement_digest_text',
+ 'std',
+ 'stddev',
+ 'stddev_pop',
+ 'stddev_samp',
+ 'str_to_date',
+ 'strcmp',
+ 'subdate',
+ 'substr',
+ 'substring',
+ 'substring_index',
+ 'subtime',
+ 'sum',
+ 'sysdate',
+ 'system_user',
+ 'tan',
+ 'time_format',
+ 'time_to_sec',
+ 'timediff',
+ 'to_base64',
+ 'to_days',
+ 'to_seconds',
+ 'trim',
+ 'ucase',
+ 'uncompress',
+ 'uncompressed_length',
+ 'unhex',
+ 'unix_timestamp',
+ 'updatexml',
+ 'upper',
+ 'uuid',
+ 'uuid_short',
+ 'uuid_to_bin',
+ 'validate_password_strength',
+ 'var_pop',
+ 'var_samp',
+ 'variance',
+ 'version',
+ 'wait_for_executed_gtid_set',
+ 'wait_until_sql_thread_after_gtids',
+ 'weekday',
+ 'weekofyear',
+ 'yearweek',
+)
+
+
+MYSQL_OPTIMIZER_HINTS = (
+ 'bka',
+ 'bnl',
+ 'dupsweedout',
+ 'firstmatch',
+ 'group_index',
+ 'hash_join',
+ 'index',
+ 'index_merge',
+ 'intoexists',
+ 'join_fixed_order',
+ 'join_index',
+ 'join_order',
+ 'join_prefix',
+ 'join_suffix',
+ 'loosescan',
+ 'materialization',
+ 'max_execution_time',
+ 'merge',
+ 'mrr',
+ 'no_bka',
+ 'no_bnl',
+ 'no_group_index',
+ 'no_hash_join',
+ 'no_icp',
+ 'no_index',
+ 'no_index_merge',
+ 'no_join_index',
+ 'no_merge',
+ 'no_mrr',
+ 'no_order_index',
+ 'no_range_optimization',
+ 'no_semijoin',
+ 'no_skip_scan',
+ 'order_index',
+ 'qb_name',
+ 'resource_group',
+ 'semijoin',
+ 'set_var',
+ 'skip_scan',
+ 'subquery',
+)
+
+
+MYSQL_KEYWORDS = (
+ 'accessible',
+ 'account',
+ 'action',
+ 'active',
+ 'add',
+ 'admin',
+ 'after',
+ 'against',
+ 'aggregate',
+ 'algorithm',
+ 'all',
+ 'alter',
+ 'always',
+ 'analyze',
+ 'and',
+ 'any',
+ 'array',
+ 'as',
+ 'asc',
+ 'ascii',
+ 'asensitive',
+ 'at',
+ 'attribute',
+ 'auto_increment',
+ 'autoextend_size',
+ 'avg',
+ 'avg_row_length',
+ 'backup',
+ 'before',
+ 'begin',
+ 'between',
+ 'binlog',
+ 'block',
+ 'both',
+ 'btree',
+ 'buckets',
+ 'by',
+ 'byte',
+ 'cache',
+ 'call',
+ 'cascade',
+ 'cascaded',
+ 'case',
+ 'catalog_name',
+ 'chain',
+ 'change',
+ 'changed',
+ 'channel',
+ 'character',
+ 'charset',
+ 'check',
+ 'checksum',
+ 'cipher',
+ 'class_origin',
+ 'client',
+ 'clone',
+ 'close',
+ 'coalesce',
+ 'code',
+ 'collate',
+ 'collation',
+ 'column',
+ 'column_format',
+ 'column_name',
+ 'columns',
+ 'comment',
+ 'commit',
+ 'committed',
+ 'compact',
+ 'completion',
+ 'component',
+ 'compressed',
+ 'compression',
+ 'concurrent',
+ 'condition',
+ 'connection',
+ 'consistent',
+ 'constraint',
+ 'constraint_catalog',
+ 'constraint_name',
+ 'constraint_schema',
+ 'contains',
+ 'context',
+ 'continue',
+ 'convert',
+ 'cpu',
+ 'create',
+ 'cross',
+ 'cube',
+ 'cume_dist',
+ 'current',
+ 'current_date',
+ 'current_time',
+ 'current_timestamp',
+ 'current_user',
+ 'cursor',
+ 'cursor_name',
+ 'data',
+ 'database',
+ 'databases',
+ 'datafile',
+ 'day',
+ 'day_hour',
+ 'day_microsecond',
+ 'day_minute',
+ 'day_second',
+ 'deallocate',
+ 'declare',
+ 'default',
+ 'default_auth',
+ 'definer',
+ 'definition',
+ 'delay_key_write',
+ 'delayed',
+ 'delete',
+ 'dense_rank',
+ 'desc',
+ 'describe',
+ 'description',
+ 'deterministic',
+ 'diagnostics',
+ 'directory',
+ 'disable',
+ 'discard',
+ 'disk',
+ 'distinct',
+ 'distinctrow',
+ 'div',
+ 'do',
+ 'drop',
+ 'dual',
+ 'dumpfile',
+ 'duplicate',
+ 'dynamic',
+ 'each',
+ 'else',
+ 'elseif',
+ 'empty',
+ 'enable',
+ 'enclosed',
+ 'encryption',
+ 'end',
+ 'ends',
+ 'enforced',
+ 'engine',
+ 'engine_attribute',
+ 'engines',
+ 'error',
+ 'errors',
+ 'escape',
+ 'escaped',
+ 'event',
+ 'events',
+ 'every',
+ 'except',
+ 'exchange',
+ 'exclude',
+ 'execute',
+ 'exists',
+ 'exit',
+ 'expansion',
+ 'expire',
+ 'explain',
+ 'export',
+ 'extended',
+ 'extent_size',
+ 'failed_login_attempts',
+ 'false',
+ 'fast',
+ 'faults',
+ 'fetch',
+ 'fields',
+ 'file',
+ 'file_block_size',
+ 'filter',
+ 'first',
+ 'first_value',
+ 'flush',
+ 'following',
+ 'follows',
+ 'for',
+ 'force',
+ 'foreign',
+ 'format',
+ 'found',
+ 'from',
+ 'full',
+ 'fulltext',
+ 'function',
+ 'general',
+ 'generated',
+ 'geomcollection',
+ 'get',
+ 'get_format',
+ 'get_master_public_key',
+ 'global',
+ 'grant',
+ 'grants',
+ 'group',
+ 'group_replication',
+ 'grouping',
+ 'groups',
+ 'handler',
+ 'hash',
+ 'having',
+ 'help',
+ 'high_priority',
+ 'histogram',
+ 'history',
+ 'host',
+ 'hosts',
+ 'hour',
+ 'hour_microsecond',
+ 'hour_minute',
+ 'hour_second',
+ 'identified',
+ 'if',
+ 'ignore',
+ 'ignore_server_ids',
+ 'import',
+ 'in',
+ 'inactive',
+ 'index',
+ 'indexes',
+ 'infile',
+ 'initial_size',
+ 'inner',
+ 'inout',
+ 'insensitive',
+ 'insert',
+ 'insert_method',
+ 'install',
+ 'instance',
+ 'interval',
+ 'into',
+ 'invisible',
+ 'invoker',
+ 'io',
+ 'io_after_gtids',
+ 'io_before_gtids',
+ 'io_thread',
+ 'ipc',
+ 'is',
+ 'isolation',
+ 'issuer',
+ 'iterate',
+ 'join',
+ 'json_table',
+ 'json_value',
+ 'key',
+ 'key_block_size',
+ 'keys',
+ 'kill',
+ 'lag',
+ 'language',
+ 'last',
+ 'last_value',
+ 'lateral',
+ 'lead',
+ 'leading',
+ 'leave',
+ 'leaves',
+ 'left',
+ 'less',
+ 'level',
+ 'like',
+ 'limit',
+ 'linear',
+ 'lines',
+ 'list',
+ 'load',
+ 'local',
+ 'localtime',
+ 'localtimestamp',
+ 'lock',
+ 'locked',
+ 'locks',
+ 'logfile',
+ 'logs',
+ 'loop',
+ 'low_priority',
+ 'master',
+ 'master_auto_position',
+ 'master_bind',
+ 'master_compression_algorithms',
+ 'master_connect_retry',
+ 'master_delay',
+ 'master_heartbeat_period',
+ 'master_host',
+ 'master_log_file',
+ 'master_log_pos',
+ 'master_password',
+ 'master_port',
+ 'master_public_key_path',
+ 'master_retry_count',
+ 'master_server_id',
+ 'master_ssl',
+ 'master_ssl_ca',
+ 'master_ssl_capath',
+ 'master_ssl_cert',
+ 'master_ssl_cipher',
+ 'master_ssl_crl',
+ 'master_ssl_crlpath',
+ 'master_ssl_key',
+ 'master_ssl_verify_server_cert',
+ 'master_tls_ciphersuites',
+ 'master_tls_version',
+ 'master_user',
+ 'master_zstd_compression_level',
+ 'match',
+ 'max_connections_per_hour',
+ 'max_queries_per_hour',
+ 'max_rows',
+ 'max_size',
+ 'max_updates_per_hour',
+ 'max_user_connections',
+ 'maxvalue',
+ 'medium',
+ 'member',
+ 'memory',
+ 'merge',
+ 'message_text',
+ 'microsecond',
+ 'migrate',
+ 'min_rows',
+ 'minute',
+ 'minute_microsecond',
+ 'minute_second',
+ 'mod',
+ 'mode',
+ 'modifies',
+ 'modify',
+ 'month',
+ 'mutex',
+ 'mysql_errno',
+ 'name',
+ 'names',
+ 'natural',
+ 'ndb',
+ 'ndbcluster',
+ 'nested',
+ 'network_namespace',
+ 'never',
+ 'new',
+ 'next',
+ 'no',
+ 'no_wait',
+ 'no_write_to_binlog',
+ 'nodegroup',
+ 'none',
+ 'not',
+ 'nowait',
+ 'nth_value',
+ 'ntile',
+ 'null',
+ 'nulls',
+ 'number',
+ 'of',
+ 'off',
+ 'offset',
+ 'oj',
+ 'old',
+ 'on',
+ 'one',
+ 'only',
+ 'open',
+ 'optimize',
+ 'optimizer_costs',
+ 'option',
+ 'optional',
+ 'optionally',
+ 'options',
+ 'or',
+ 'order',
+ 'ordinality',
+ 'organization',
+ 'others',
+ 'out',
+ 'outer',
+ 'outfile',
+ 'over',
+ 'owner',
+ 'pack_keys',
+ 'page',
+ 'parser',
+ 'partial',
+ 'partition',
+ 'partitioning',
+ 'partitions',
+ 'password',
+ 'password_lock_time',
+ 'path',
+ 'percent_rank',
+ 'persist',
+ 'persist_only',
+ 'phase',
+ 'plugin',
+ 'plugin_dir',
+ 'plugins',
+ 'port',
+ 'precedes',
+ 'preceding',
+ 'prepare',
+ 'preserve',
+ 'prev',
+ 'primary',
+ 'privilege_checks_user',
+ 'privileges',
+ 'procedure',
+ 'process',
+ 'processlist',
+ 'profile',
+ 'profiles',
+ 'proxy',
+ 'purge',
+ 'quarter',
+ 'query',
+ 'quick',
+ 'random',
+ 'range',
+ 'rank',
+ 'read',
+ 'read_only',
+ 'read_write',
+ 'reads',
+ 'rebuild',
+ 'recover',
+ 'recursive',
+ 'redo_buffer_size',
+ 'redundant',
+ 'reference',
+ 'references',
+ 'regexp',
+ 'relay',
+ 'relay_log_file',
+ 'relay_log_pos',
+ 'relay_thread',
+ 'relaylog',
+ 'release',
+ 'reload',
+ 'remove',
+ 'rename',
+ 'reorganize',
+ 'repair',
+ 'repeat',
+ 'repeatable',
+ 'replace',
+ 'replicate_do_db',
+ 'replicate_do_table',
+ 'replicate_ignore_db',
+ 'replicate_ignore_table',
+ 'replicate_rewrite_db',
+ 'replicate_wild_do_table',
+ 'replicate_wild_ignore_table',
+ 'replication',
+ 'require',
+ 'require_row_format',
+ 'require_table_primary_key_check',
+ 'reset',
+ 'resignal',
+ 'resource',
+ 'respect',
+ 'restart',
+ 'restore',
+ 'restrict',
+ 'resume',
+ 'retain',
+ 'return',
+ 'returned_sqlstate',
+ 'returning',
+ 'returns',
+ 'reuse',
+ 'reverse',
+ 'revoke',
+ 'right',
+ 'rlike',
+ 'role',
+ 'rollback',
+ 'rollup',
+ 'rotate',
+ 'routine',
+ 'row',
+ 'row_count',
+ 'row_format',
+ 'row_number',
+ 'rows',
+ 'rtree',
+ 'savepoint',
+ 'schedule',
+ 'schema',
+ 'schema_name',
+ 'schemas',
+ 'second',
+ 'second_microsecond',
+ 'secondary',
+ 'secondary_engine',
+ 'secondary_engine_attribute',
+ 'secondary_load',
+ 'secondary_unload',
+ 'security',
+ 'select',
+ 'sensitive',
+ 'separator',
+ 'serializable',
+ 'server',
+ 'session',
+ 'share',
+ 'show',
+ 'shutdown',
+ 'signal',
+ 'signed',
+ 'simple',
+ 'skip',
+ 'slave',
+ 'slow',
+ 'snapshot',
+ 'socket',
+ 'some',
+ 'soname',
+ 'sounds',
+ 'source',
+ 'spatial',
+ 'specific',
+ 'sql',
+ 'sql_after_gtids',
+ 'sql_after_mts_gaps',
+ 'sql_before_gtids',
+ 'sql_big_result',
+ 'sql_buffer_result',
+ 'sql_calc_found_rows',
+ 'sql_no_cache',
+ 'sql_small_result',
+ 'sql_thread',
+ 'sql_tsi_day',
+ 'sql_tsi_hour',
+ 'sql_tsi_minute',
+ 'sql_tsi_month',
+ 'sql_tsi_quarter',
+ 'sql_tsi_second',
+ 'sql_tsi_week',
+ 'sql_tsi_year',
+ 'sqlexception',
+ 'sqlstate',
+ 'sqlwarning',
+ 'srid',
+ 'ssl',
+ 'stacked',
+ 'start',
+ 'starting',
+ 'starts',
+ 'stats_auto_recalc',
+ 'stats_persistent',
+ 'stats_sample_pages',
+ 'status',
+ 'stop',
+ 'storage',
+ 'stored',
+ 'straight_join',
+ 'stream',
+ 'string',
+ 'subclass_origin',
+ 'subject',
+ 'subpartition',
+ 'subpartitions',
+ 'super',
+ 'suspend',
+ 'swaps',
+ 'switches',
+ 'system',
+ 'table',
+ 'table_checksum',
+ 'table_name',
+ 'tables',
+ 'tablespace',
+ 'temporary',
+ 'temptable',
+ 'terminated',
+ 'than',
+ 'then',
+ 'thread_priority',
+ 'ties',
+ 'timestampadd',
+ 'timestampdiff',
+ 'tls',
+ 'to',
+ 'trailing',
+ 'transaction',
+ 'trigger',
+ 'triggers',
+ 'true',
+ 'truncate',
+ 'type',
+ 'types',
+ 'unbounded',
+ 'uncommitted',
+ 'undefined',
+ 'undo',
+ 'undo_buffer_size',
+ 'undofile',
+ 'unicode',
+ 'uninstall',
+ 'union',
+ 'unique',
+ 'unknown',
+ 'unlock',
+ 'unsigned',
+ 'until',
+ 'update',
+ 'upgrade',
+ 'usage',
+ 'use',
+ 'use_frm',
+ 'user',
+ 'user_resources',
+ 'using',
+ 'utc_date',
+ 'utc_time',
+ 'utc_timestamp',
+ 'validation',
+ 'value',
+ 'values',
+ 'variables',
+ 'vcpu',
+ 'view',
+ 'virtual',
+ 'visible',
+ 'wait',
+ 'warnings',
+ 'week',
+ 'weight_string',
+ 'when',
+ 'where',
+ 'while',
+ 'window',
+ 'with',
+ 'without',
+ 'work',
+ 'wrapper',
+ 'write',
+ 'x509',
+ 'xa',
+ 'xid',
+ 'xml',
+ 'xor',
+ 'year_month',
+ 'zerofill',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ from urllib.request import urlopen
+
+ from pygments.util import format_lines
+
+ # MySQL source code
+ SOURCE_URL = 'https://github.com/mysql/mysql-server/raw/8.0'
+ LEX_URL = SOURCE_URL + '/sql/lex.h'
+ ITEM_CREATE_URL = SOURCE_URL + '/sql/item_create.cc'
+
+
+ def update_myself():
+ # Pull content from lex.h.
+ lex_file = urlopen(LEX_URL).read().decode('utf8', errors='ignore')
+ keywords = parse_lex_keywords(lex_file)
+ functions = parse_lex_functions(lex_file)
+ optimizer_hints = parse_lex_optimizer_hints(lex_file)
+
+ # Parse content in item_create.cc.
+ item_create_file = urlopen(ITEM_CREATE_URL).read().decode('utf8', errors='ignore')
+ functions.update(parse_item_create_functions(item_create_file))
+
+ # Remove data types from the set of keywords.
+ keywords -= set(MYSQL_DATATYPES)
+
+ update_content('MYSQL_FUNCTIONS', tuple(sorted(functions)))
+ update_content('MYSQL_KEYWORDS', tuple(sorted(keywords)))
+ update_content('MYSQL_OPTIMIZER_HINTS', tuple(sorted(optimizer_hints)))
+
+
+ def parse_lex_keywords(f):
+ """Parse keywords in lex.h."""
+
+ results = set()
+ for m in re.finditer(r'{SYM(?:_HK)?\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
+ results.add(m.group('keyword').lower())
+
+ if not results:
+ raise ValueError('No keywords found')
+
+ return results
+
+
+ def parse_lex_optimizer_hints(f):
+ """Parse optimizer hints in lex.h."""
+
+ results = set()
+ for m in re.finditer(r'{SYM_H\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
+ results.add(m.group('keyword').lower())
+
+ if not results:
+ raise ValueError('No optimizer hints found')
+
+ return results
+
+
+ def parse_lex_functions(f):
+ """Parse MySQL function names from lex.h."""
+
+ results = set()
+ for m in re.finditer(r'{SYM_FN?\("(?P<function>[a-z0-9_]+)",', f, flags=re.I):
+ results.add(m.group('function').lower())
+
+ if not results:
+ raise ValueError('No lex functions found')
+
+ return results
+
+
+ def parse_item_create_functions(f):
+ """Parse MySQL function names from item_create.cc."""
+
+ results = set()
+ for m in re.finditer(r'{"(?P<function>[^"]+?)",\s*SQL_F[^(]+?\(', f, flags=re.I):
+ results.add(m.group('function').lower())
+
+ if not results:
+ raise ValueError('No item_create functions found')
+
+ return results
+
+
+ def update_content(field_name, content):
+ """Overwrite this file with content parsed from MySQL's source code."""
+
+ with open(__file__) as f:
+ data = f.read()
+
+ # Line to start/end inserting
+ re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % field_name, re.M | re.S)
+ m = re_match.search(data)
+ if not m:
+ raise ValueError('Could not find an existing definition for %s' % field_name)
+
+ new_block = format_lines(field_name, content)
+ data = data[:m.start()] + new_block + data[m.end():]
+
+ with open(__file__, 'w', newline='\n') as f:
+ f.write(data)
+
+ update_myself()
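For reference, the regex-driven block replacement performed by update_content() above can be sketched in isolation. The format_lines helper below is a simplified stand-in for pygments.util.format_lines (an assumption, not the real implementation), and replace_block is a hypothetical name used only for illustration; the multiline pattern is essentially the one compiled in update_content():

import re

def format_lines(name, items):
    # Simplified stand-in: render a sorted tuple literal, one item per line.
    body = '\n'.join('    %r,' % item for item in sorted(items))
    return '%s = (\n%s\n)' % (name, body)

def replace_block(source, field_name, items):
    # Locate "FIELD = ( ... )" spanning multiple lines and splice in a
    # freshly formatted block, the way update_content() rewrites this file.
    pattern = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % re.escape(field_name),
                         re.M | re.S)
    m = pattern.search(source)
    if not m:
        raise ValueError('no existing definition for %s' % field_name)
    return source[:m.start()] + format_lines(field_name, items) + source[m.end():]

demo = "MYSQL_KEYWORDS = (\n    'old',\n)\n"
print(replace_block(demo, 'MYSQL_KEYWORDS', {'select', 'from', 'where'}))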
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
index 43611b29d0..c238e40a1d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
@@ -4,7 +4,7 @@
Builtin list for the OpenEdgeLexer.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,15 +14,15 @@ OPENEDGEKEYWORDS = (
'ABSOL',
'ABSOLU',
'ABSOLUT',
- 'ABSOLUTE',
- 'ABSTRACT',
+ 'ABSOLUTE',
+ 'ABSTRACT',
'ACCELERATOR',
'ACCUM',
'ACCUMU',
'ACCUMUL',
'ACCUMULA',
'ACCUMULAT',
- 'ACCUMULATE',
+ 'ACCUMULATE',
'ACTIVE-FORM',
'ACTIVE-WINDOW',
'ADD',
@@ -54,8 +54,8 @@ OPENEDGEKEYWORDS = (
'AMBIGU',
'AMBIGUO',
'AMBIGUOU',
- 'AMBIGUOUS',
- 'ANALYZ',
+ 'AMBIGUOUS',
+ 'ANALYZ',
'ANALYZE',
'AND',
'ANSI-ONLY',
@@ -68,7 +68,7 @@ OPENEDGEKEYWORDS = (
'APPL-ALERT-BO',
'APPL-ALERT-BOX',
'APPL-ALERT-BOXE',
- 'APPL-ALERT-BOXES',
+ 'APPL-ALERT-BOXES',
'APPL-CONTEXT-ID',
'APPLICATION',
'APPLY',
@@ -83,16 +83,16 @@ OPENEDGEKEYWORDS = (
'ASCEND',
'ASCENDI',
'ASCENDIN',
- 'ASCENDING',
+ 'ASCENDING',
'ASK-OVERWRITE',
'ASSEMBLY',
'ASSIGN',
'ASYNC-REQUEST-COUNT',
'ASYNC-REQUEST-HANDLE',
- 'ASYNCHRONOUS',
+ 'ASYNCHRONOUS',
'AT',
'ATTACHED-PAIRLIST',
- 'ATTR',
+ 'ATTR',
'ATTR-SPACE',
'ATTRI',
'ATTRIB',
@@ -110,35 +110,35 @@ OPENEDGEKEYWORDS = (
'AUTO-COMPLET',
'AUTO-COMPLETI',
'AUTO-COMPLETIO',
- 'AUTO-COMPLETION',
- 'AUTO-END-KEY',
+ 'AUTO-COMPLETION',
+ 'AUTO-END-KEY',
'AUTO-ENDKEY',
'AUTO-GO',
'AUTO-IND',
'AUTO-INDE',
'AUTO-INDEN',
- 'AUTO-INDENT',
+ 'AUTO-INDENT',
'AUTO-RESIZE',
'AUTO-RET',
'AUTO-RETU',
'AUTO-RETUR',
- 'AUTO-RETURN',
+ 'AUTO-RETURN',
'AUTO-SYNCHRONIZE',
'AUTO-Z',
'AUTO-ZA',
- 'AUTO-ZAP',
- 'AUTOMATIC',
+ 'AUTO-ZAP',
+ 'AUTOMATIC',
'AVAIL',
'AVAILA',
'AVAILAB',
'AVAILABL',
- 'AVAILABLE',
+ 'AVAILABLE',
'AVAILABLE-FORMATS',
'AVE',
'AVER',
'AVERA',
'AVERAG',
- 'AVERAGE',
+ 'AVERAGE',
'AVG',
'BACK',
'BACKG',
@@ -146,8 +146,8 @@ OPENEDGEKEYWORDS = (
'BACKGRO',
'BACKGROU',
'BACKGROUN',
- 'BACKGROUND',
- 'BACKWARD',
+ 'BACKGROUND',
+ 'BACKWARD',
'BACKWARDS',
'BASE64-DECODE',
'BASE64-ENCODE',
@@ -158,12 +158,12 @@ OPENEDGEKEYWORDS = (
'BATCH-M',
'BATCH-MO',
'BATCH-MOD',
- 'BATCH-MODE',
+ 'BATCH-MODE',
'BATCH-SIZE',
'BEFORE-H',
'BEFORE-HI',
'BEFORE-HID',
- 'BEFORE-HIDE',
+ 'BEFORE-HIDE',
'BEGIN-EVENT-GROUP',
'BEGINS',
'BELL',
@@ -172,26 +172,26 @@ OPENEDGEKEYWORDS = (
'BGCO',
'BGCOL',
'BGCOLO',
- 'BGCOLOR',
+ 'BGCOLOR',
'BIG-ENDIAN',
'BINARY',
'BIND',
'BIND-WHERE',
'BLANK',
'BLOCK-ITERATION-DISPLAY',
- 'BLOCK-LEVEL',
+ 'BLOCK-LEVEL',
'BORDER-B',
'BORDER-BO',
'BORDER-BOT',
'BORDER-BOTT',
'BORDER-BOTTO',
- 'BORDER-BOTTOM-CHARS',
+ 'BORDER-BOTTOM-CHARS',
'BORDER-BOTTOM-P',
'BORDER-BOTTOM-PI',
'BORDER-BOTTOM-PIX',
'BORDER-BOTTOM-PIXE',
'BORDER-BOTTOM-PIXEL',
- 'BORDER-BOTTOM-PIXELS',
+ 'BORDER-BOTTOM-PIXELS',
'BORDER-L',
'BORDER-LE',
'BORDER-LEF',
@@ -201,13 +201,13 @@ OPENEDGEKEYWORDS = (
'BORDER-LEFT-CH',
'BORDER-LEFT-CHA',
'BORDER-LEFT-CHAR',
- 'BORDER-LEFT-CHARS',
+ 'BORDER-LEFT-CHARS',
'BORDER-LEFT-P',
'BORDER-LEFT-PI',
'BORDER-LEFT-PIX',
'BORDER-LEFT-PIXE',
'BORDER-LEFT-PIXEL',
- 'BORDER-LEFT-PIXELS',
+ 'BORDER-LEFT-PIXELS',
'BORDER-R',
'BORDER-RI',
'BORDER-RIG',
@@ -218,13 +218,13 @@ OPENEDGEKEYWORDS = (
'BORDER-RIGHT-CH',
'BORDER-RIGHT-CHA',
'BORDER-RIGHT-CHAR',
- 'BORDER-RIGHT-CHARS',
+ 'BORDER-RIGHT-CHARS',
'BORDER-RIGHT-P',
'BORDER-RIGHT-PI',
'BORDER-RIGHT-PIX',
'BORDER-RIGHT-PIXE',
'BORDER-RIGHT-PIXEL',
- 'BORDER-RIGHT-PIXELS',
+ 'BORDER-RIGHT-PIXELS',
'BORDER-T',
'BORDER-TO',
'BORDER-TOP',
@@ -233,19 +233,19 @@ OPENEDGEKEYWORDS = (
'BORDER-TOP-CH',
'BORDER-TOP-CHA',
'BORDER-TOP-CHAR',
- 'BORDER-TOP-CHARS',
+ 'BORDER-TOP-CHARS',
'BORDER-TOP-P',
'BORDER-TOP-PI',
'BORDER-TOP-PIX',
'BORDER-TOP-PIXE',
'BORDER-TOP-PIXEL',
- 'BORDER-TOP-PIXELS',
+ 'BORDER-TOP-PIXELS',
'BOX',
'BOX-SELECT',
'BOX-SELECTA',
'BOX-SELECTAB',
'BOX-SELECTABL',
- 'BOX-SELECTABLE',
+ 'BOX-SELECTABLE',
'BREAK',
'BROWSE',
'BUFFER',
@@ -258,7 +258,7 @@ OPENEDGEKEYWORDS = (
'BUFFER-HANDLE',
'BUFFER-LINES',
'BUFFER-NAME',
- 'BUFFER-PARTITION-ID',
+ 'BUFFER-PARTITION-ID',
'BUFFER-RELEASE',
'BUFFER-VALUE',
'BUTTON',
@@ -274,14 +274,14 @@ OPENEDGEKEYWORDS = (
'CAN-CREATE',
'CAN-DELETE',
'CAN-DO',
- 'CAN-DO-DOMAIN-SUPPORT',
+ 'CAN-DO-DOMAIN-SUPPORT',
'CAN-FIND',
'CAN-QUERY',
'CAN-READ',
'CAN-SET',
'CAN-WRITE',
- 'CANCEL-BREAK',
- 'CANCEL-BUTTON',
+ 'CANCEL-BREAK',
+ 'CANCEL-BUTTON',
'CAPS',
'CAREFUL-PAINT',
'CASE',
@@ -291,15 +291,15 @@ OPENEDGEKEYWORDS = (
'CASE-SENSIT',
'CASE-SENSITI',
'CASE-SENSITIV',
- 'CASE-SENSITIVE',
+ 'CASE-SENSITIVE',
'CAST',
'CATCH',
'CDECL',
'CENTER',
'CENTERE',
- 'CENTERED',
+ 'CENTERED',
'CHAINED',
- 'CHARACTER',
+ 'CHARACTER',
'CHARACTER_LENGTH',
'CHARSET',
'CHECK',
@@ -314,8 +314,8 @@ OPENEDGEKEYWORDS = (
'CLEAR-SELECT',
'CLEAR-SELECTI',
'CLEAR-SELECTIO',
- 'CLEAR-SELECTION',
- 'CLEAR-SORT-ARROW',
+ 'CLEAR-SELECTION',
+ 'CLEAR-SORT-ARROW',
'CLEAR-SORT-ARROWS',
'CLIENT-CONNECTION-ID',
'CLIENT-PRINCIPAL',
@@ -329,34 +329,34 @@ OPENEDGEKEYWORDS = (
'CODEBASE-LOCATOR',
'CODEPAGE',
'CODEPAGE-CONVERT',
- 'COL',
- 'COL-OF',
+ 'COL',
+ 'COL-OF',
'COLLATE',
'COLON',
'COLON-ALIGN',
'COLON-ALIGNE',
- 'COLON-ALIGNED',
+ 'COLON-ALIGNED',
'COLOR',
'COLOR-TABLE',
'COLU',
'COLUM',
- 'COLUMN',
+ 'COLUMN',
'COLUMN-BGCOLOR',
'COLUMN-DCOLOR',
'COLUMN-FGCOLOR',
'COLUMN-FONT',
'COLUMN-LAB',
'COLUMN-LABE',
- 'COLUMN-LABEL',
+ 'COLUMN-LABEL',
'COLUMN-MOVABLE',
'COLUMN-OF',
'COLUMN-PFCOLOR',
'COLUMN-READ-ONLY',
'COLUMN-RESIZABLE',
- 'COLUMN-SCROLLING',
+ 'COLUMN-SCROLLING',
'COLUMNS',
- 'COM-HANDLE',
- 'COM-SELF',
+ 'COM-HANDLE',
+ 'COM-SELF',
'COMBO-BOX',
'COMMAND',
'COMPARES',
@@ -381,7 +381,7 @@ OPENEDGEKEYWORDS = (
'CONVERT-3D-COLORS',
'CONVERT-TO-OFFS',
'CONVERT-TO-OFFSE',
- 'CONVERT-TO-OFFSET',
+ 'CONVERT-TO-OFFSET',
'COPY-DATASET',
'COPY-LOB',
'COPY-SAX-ATTRIBUTES',
@@ -415,24 +415,24 @@ OPENEDGEKEYWORDS = (
'CURRENT-ENVIRONM',
'CURRENT-ENVIRONME',
'CURRENT-ENVIRONMEN',
- 'CURRENT-ENVIRONMENT',
+ 'CURRENT-ENVIRONMENT',
'CURRENT-ITERATION',
'CURRENT-LANG',
'CURRENT-LANGU',
'CURRENT-LANGUA',
'CURRENT-LANGUAG',
- 'CURRENT-LANGUAGE',
+ 'CURRENT-LANGUAGE',
'CURRENT-QUERY',
- 'CURRENT-REQUEST-INFO',
- 'CURRENT-RESPONSE-INFO',
+ 'CURRENT-REQUEST-INFO',
+ 'CURRENT-RESPONSE-INFO',
'CURRENT-RESULT-ROW',
'CURRENT-ROW-MODIFIED',
'CURRENT-VALUE',
'CURRENT-WINDOW',
- 'CURRENT_DATE',
+ 'CURRENT_DATE',
'CURS',
'CURSO',
- 'CURSOR',
+ 'CURSOR',
'CURSOR-CHAR',
'CURSOR-LINE',
'CURSOR-OFFSET',
@@ -440,13 +440,13 @@ OPENEDGEKEYWORDS = (
'DATA-ENTRY-RET',
'DATA-ENTRY-RETU',
'DATA-ENTRY-RETUR',
- 'DATA-ENTRY-RETURN',
+ 'DATA-ENTRY-RETURN',
'DATA-REL',
'DATA-RELA',
'DATA-RELAT',
'DATA-RELATI',
'DATA-RELATIO',
- 'DATA-RELATION',
+ 'DATA-RELATION',
'DATA-SOURCE',
'DATA-SOURCE-COMPLETE-MAP',
'DATA-SOURCE-MODIFIED',
@@ -454,21 +454,21 @@ OPENEDGEKEYWORDS = (
'DATA-T',
'DATA-TY',
'DATA-TYP',
- 'DATA-TYPE',
- 'DATABASE',
- 'DATASERVERS',
- 'DATASET',
- 'DATASET-HANDLE',
- 'DATE',
+ 'DATA-TYPE',
+ 'DATABASE',
+ 'DATASERVERS',
+ 'DATASET',
+ 'DATASET-HANDLE',
+ 'DATE',
'DATE-F',
'DATE-FO',
'DATE-FOR',
'DATE-FORM',
'DATE-FORMA',
- 'DATE-FORMAT',
+ 'DATE-FORMAT',
'DAY',
- 'DB-CONTEXT',
- 'DB-REFERENCES',
+ 'DB-CONTEXT',
+ 'DB-REFERENCES',
'DBCODEPAGE',
'DBCOLLATION',
'DBNAME',
@@ -481,39 +481,39 @@ OPENEDGEKEYWORDS = (
'DBRESTRICTI',
'DBRESTRICTIO',
'DBRESTRICTION',
- 'DBRESTRICTIONS',
+ 'DBRESTRICTIONS',
'DBTASKID',
'DBTYPE',
'DBVERS',
'DBVERSI',
'DBVERSIO',
- 'DBVERSION',
+ 'DBVERSION',
'DCOLOR',
'DDE',
'DDE-ERROR',
- 'DDE-I',
+ 'DDE-I',
'DDE-ID',
'DDE-ITEM',
'DDE-NAME',
'DDE-TOPIC',
'DEBLANK',
- 'DEBU',
+ 'DEBU',
'DEBUG',
'DEBUG-ALERT',
- 'DEBUG-LIST',
+ 'DEBUG-LIST',
'DEBUGGER',
- 'DECIMAL',
+ 'DECIMAL',
'DECIMALS',
'DECLARE',
'DECLARE-NAMESPACE',
'DECRYPT',
'DEFAULT',
- 'DEFAULT-B',
- 'DEFAULT-BU',
+ 'DEFAULT-B',
+ 'DEFAULT-BU',
'DEFAULT-BUFFER-HANDLE',
- 'DEFAULT-BUT',
- 'DEFAULT-BUTT',
- 'DEFAULT-BUTTO',
+ 'DEFAULT-BUT',
+ 'DEFAULT-BUTT',
+ 'DEFAULT-BUTTO',
'DEFAULT-BUTTON',
'DEFAULT-COMMIT',
'DEFAULT-EX',
@@ -523,28 +523,28 @@ OPENEDGEKEYWORDS = (
'DEFAULT-EXTENS',
'DEFAULT-EXTENSI',
'DEFAULT-EXTENSIO',
- 'DEFAULT-EXTENSION',
+ 'DEFAULT-EXTENSION',
'DEFAULT-NOXL',
'DEFAULT-NOXLA',
'DEFAULT-NOXLAT',
- 'DEFAULT-NOXLATE',
+ 'DEFAULT-NOXLATE',
'DEFAULT-VALUE',
'DEFAULT-WINDOW',
- 'DEFINE',
- 'DEFINE-USER-EVENT-MANAGER',
+ 'DEFINE',
+ 'DEFINE-USER-EVENT-MANAGER',
'DEFINED',
'DEL',
'DELE',
- 'DELEGATE',
+ 'DELEGATE',
'DELET',
- 'DELETE PROCEDURE',
- 'DELETE',
+ 'DELETE PROCEDURE',
+ 'DELETE',
'DELETE-CHAR',
'DELETE-CHARA',
'DELETE-CHARAC',
'DELETE-CHARACT',
'DELETE-CHARACTE',
- 'DELETE-CHARACTER',
+ 'DELETE-CHARACTER',
'DELETE-CURRENT-ROW',
'DELETE-LINE',
'DELETE-RESULT-LIST-ENTRY',
@@ -557,11 +557,11 @@ OPENEDGEKEYWORDS = (
'DESCEND',
'DESCENDI',
'DESCENDIN',
- 'DESCENDING',
+ 'DESCENDING',
'DESELECT-FOCUSED-ROW',
'DESELECT-ROWS',
'DESELECT-SELECTED-ROW',
- 'DESELECTION',
+ 'DESELECTION',
'DESTRUCTOR',
'DIALOG-BOX',
'DICT',
@@ -570,27 +570,27 @@ OPENEDGEKEYWORDS = (
'DICTION',
'DICTIONA',
'DICTIONAR',
- 'DICTIONARY',
+ 'DICTIONARY',
'DIR',
'DISABLE',
'DISABLE-AUTO-ZAP',
'DISABLE-DUMP-TRIGGERS',
'DISABLE-LOAD-TRIGGERS',
- 'DISABLED',
+ 'DISABLED',
'DISCON',
'DISCONN',
'DISCONNE',
'DISCONNEC',
- 'DISCONNECT',
+ 'DISCONNECT',
'DISP',
'DISPL',
'DISPLA',
- 'DISPLAY',
+ 'DISPLAY',
'DISPLAY-MESSAGE',
'DISPLAY-T',
'DISPLAY-TY',
'DISPLAY-TYP',
- 'DISPLAY-TYPE',
+ 'DISPLAY-TYPE',
'DISTINCT',
'DO',
'DOMAIN-DESCRIPTION',
@@ -605,13 +605,13 @@ OPENEDGEKEYWORDS = (
'DROP-DOWN-LIST',
'DROP-FILE-NOTIFY',
'DROP-TARGET',
- 'DS-CLOSE-CURSOR',
- 'DSLOG-MANAGER',
+ 'DS-CLOSE-CURSOR',
+ 'DSLOG-MANAGER',
'DUMP',
'DYNAMIC',
- 'DYNAMIC-ENUM',
+ 'DYNAMIC-ENUM',
'DYNAMIC-FUNCTION',
- 'DYNAMIC-INVOKE',
+ 'DYNAMIC-INVOKE',
'EACH',
'ECHO',
'EDGE',
@@ -620,20 +620,20 @@ OPENEDGEKEYWORDS = (
'EDGE-CH',
'EDGE-CHA',
'EDGE-CHAR',
- 'EDGE-CHARS',
+ 'EDGE-CHARS',
'EDGE-P',
'EDGE-PI',
'EDGE-PIX',
'EDGE-PIXE',
'EDGE-PIXEL',
- 'EDGE-PIXELS',
+ 'EDGE-PIXELS',
'EDIT-CAN-PASTE',
'EDIT-CAN-UNDO',
'EDIT-CLEAR',
'EDIT-COPY',
'EDIT-CUT',
- 'EDIT-PASTE',
- 'EDIT-UNDO',
+ 'EDIT-PASTE',
+ 'EDIT-UNDO',
'EDITING',
'EDITOR',
'ELSE',
@@ -655,41 +655,41 @@ OPENEDGEKEYWORDS = (
'END-RESIZE',
'END-ROW-RESIZE',
'END-USER-PROMPT',
- 'ENDKEY',
+ 'ENDKEY',
'ENTERED',
- 'ENTITY-EXPANSION-LIMIT',
+ 'ENTITY-EXPANSION-LIMIT',
'ENTRY',
- 'ENUM',
+ 'ENUM',
'EQ',
'ERROR',
'ERROR-COL',
'ERROR-COLU',
'ERROR-COLUM',
- 'ERROR-COLUMN',
+ 'ERROR-COLUMN',
'ERROR-ROW',
'ERROR-STACK-TRACE',
'ERROR-STAT',
'ERROR-STATU',
- 'ERROR-STATUS',
+ 'ERROR-STATUS',
'ESCAPE',
'ETIME',
- 'EVENT',
+ 'EVENT',
'EVENT-GROUP-ID',
'EVENT-PROCEDURE',
'EVENT-PROCEDURE-CONTEXT',
'EVENT-T',
'EVENT-TY',
'EVENT-TYP',
- 'EVENT-TYPE',
- 'EVENTS',
+ 'EVENT-TYPE',
+ 'EVENTS',
'EXCEPT',
'EXCLUSIVE',
'EXCLUSIVE-',
- 'EXCLUSIVE-ID',
+ 'EXCLUSIVE-ID',
'EXCLUSIVE-L',
'EXCLUSIVE-LO',
'EXCLUSIVE-LOC',
- 'EXCLUSIVE-LOCK',
+ 'EXCLUSIVE-LOCK',
'EXCLUSIVE-WEB-USER',
'EXECUTE',
'EXISTS',
@@ -709,7 +709,7 @@ OPENEDGEKEYWORDS = (
'FGCO',
'FGCOL',
'FGCOLO',
- 'FGCOLOR',
+ 'FGCOLOR',
'FIELD',
'FIELDS',
'FILE',
@@ -722,19 +722,19 @@ OPENEDGEKEYWORDS = (
'FILE-INFORMAT',
'FILE-INFORMATI',
'FILE-INFORMATIO',
- 'FILE-INFORMATION',
+ 'FILE-INFORMATION',
'FILE-MOD-DATE',
'FILE-MOD-TIME',
'FILE-NAME',
'FILE-OFF',
'FILE-OFFS',
'FILE-OFFSE',
- 'FILE-OFFSET',
+ 'FILE-OFFSET',
'FILE-SIZE',
'FILE-TYPE',
- 'FILENAME',
+ 'FILENAME',
'FILL',
- 'FILL-IN',
+ 'FILL-IN',
'FILLED',
'FILTERS',
'FINAL',
@@ -751,7 +751,7 @@ OPENEDGEKEYWORDS = (
'FIND-SELECT',
'FIND-UNIQUE',
'FIND-WRAP-AROUND',
- 'FINDER',
+ 'FINDER',
'FIRST',
'FIRST-ASYNCH-REQUEST',
'FIRST-CHILD',
@@ -764,12 +764,12 @@ OPENEDGEKEYWORDS = (
'FIRST-PROCED',
'FIRST-PROCEDU',
'FIRST-PROCEDUR',
- 'FIRST-PROCEDURE',
+ 'FIRST-PROCEDURE',
'FIRST-SERVER',
'FIRST-TAB-I',
'FIRST-TAB-IT',
'FIRST-TAB-ITE',
- 'FIRST-TAB-ITEM',
+ 'FIRST-TAB-ITEM',
'FIT-LAST-COLUMN',
'FIXED-ONLY',
'FLAT-BUTTON',
@@ -787,26 +787,26 @@ OPENEDGEKEYWORDS = (
'FOREGRO',
'FOREGROU',
'FOREGROUN',
- 'FOREGROUND',
- 'FORM INPUT',
+ 'FOREGROUND',
+ 'FORM INPUT',
'FORM',
- 'FORM-LONG-INPUT',
- 'FORMA',
+ 'FORM-LONG-INPUT',
+ 'FORMA',
'FORMAT',
- 'FORMATTE',
+ 'FORMATTE',
'FORMATTED',
'FORWARD',
'FORWARDS',
- 'FRAGMEN',
+ 'FRAGMEN',
'FRAGMENT',
- 'FRAM',
+ 'FRAM',
'FRAME',
'FRAME-COL',
'FRAME-DB',
'FRAME-DOWN',
'FRAME-FIELD',
'FRAME-FILE',
- 'FRAME-INDE',
+ 'FRAME-INDE',
'FRAME-INDEX',
'FRAME-LINE',
'FRAME-NAME',
@@ -815,10 +815,10 @@ OPENEDGEKEYWORDS = (
'FRAME-SPAC',
'FRAME-SPACI',
'FRAME-SPACIN',
- 'FRAME-SPACING',
+ 'FRAME-SPACING',
'FRAME-VAL',
'FRAME-VALU',
- 'FRAME-VALUE',
+ 'FRAME-VALUE',
'FRAME-X',
'FRAME-Y',
'FREQUENCY',
@@ -827,51 +827,51 @@ OPENEDGEKEYWORDS = (
'FROM-CH',
'FROM-CHA',
'FROM-CHAR',
- 'FROM-CHARS',
+ 'FROM-CHARS',
'FROM-CUR',
'FROM-CURR',
'FROM-CURRE',
'FROM-CURREN',
- 'FROM-CURRENT',
+ 'FROM-CURRENT',
'FROM-P',
'FROM-PI',
'FROM-PIX',
'FROM-PIXE',
'FROM-PIXEL',
- 'FROM-PIXELS',
+ 'FROM-PIXELS',
'FULL-HEIGHT',
'FULL-HEIGHT-',
'FULL-HEIGHT-C',
'FULL-HEIGHT-CH',
'FULL-HEIGHT-CHA',
'FULL-HEIGHT-CHAR',
- 'FULL-HEIGHT-CHARS',
+ 'FULL-HEIGHT-CHARS',
'FULL-HEIGHT-P',
'FULL-HEIGHT-PI',
'FULL-HEIGHT-PIX',
'FULL-HEIGHT-PIXE',
'FULL-HEIGHT-PIXEL',
- 'FULL-HEIGHT-PIXELS',
+ 'FULL-HEIGHT-PIXELS',
'FULL-PATHN',
'FULL-PATHNA',
'FULL-PATHNAM',
- 'FULL-PATHNAME',
+ 'FULL-PATHNAME',
'FULL-WIDTH',
'FULL-WIDTH-',
'FULL-WIDTH-C',
'FULL-WIDTH-CH',
'FULL-WIDTH-CHA',
'FULL-WIDTH-CHAR',
- 'FULL-WIDTH-CHARS',
+ 'FULL-WIDTH-CHARS',
'FULL-WIDTH-P',
'FULL-WIDTH-PI',
'FULL-WIDTH-PIX',
'FULL-WIDTH-PIXE',
'FULL-WIDTH-PIXEL',
- 'FULL-WIDTH-PIXELS',
+ 'FULL-WIDTH-PIXELS',
'FUNCTION',
'FUNCTION-CALL-TYPE',
- 'GATEWAY',
+ 'GATEWAY',
'GATEWAYS',
'GE',
'GENERATE-MD5',
@@ -889,16 +889,16 @@ OPENEDGEKEYWORDS = (
'GET-BLUE-VA',
'GET-BLUE-VAL',
'GET-BLUE-VALU',
- 'GET-BLUE-VALUE',
+ 'GET-BLUE-VALUE',
'GET-BROWSE-COLUMN',
- 'GET-BUFFER-HANDLE',
+ 'GET-BUFFER-HANDLE',
'GET-BYTE',
'GET-CALLBACK-PROC-CONTEXT',
'GET-CALLBACK-PROC-NAME',
'GET-CGI-LIST',
'GET-CGI-LONG-VALUE',
'GET-CGI-VALUE',
- 'GET-CLASS',
+ 'GET-CLASS',
'GET-CODEPAGES',
'GET-COLLATIONS',
'GET-CONFIG-VALUE',
@@ -910,7 +910,7 @@ OPENEDGEKEYWORDS = (
'GET-ERROR-ROW',
'GET-FILE',
'GET-FILE-NAME',
- 'GET-FILE-OFFSE',
+ 'GET-FILE-OFFSE',
'GET-FILE-OFFSET',
'GET-FIRST',
'GET-FLOAT',
@@ -920,14 +920,14 @@ OPENEDGEKEYWORDS = (
'GET-GREEN-VA',
'GET-GREEN-VAL',
'GET-GREEN-VALU',
- 'GET-GREEN-VALUE',
+ 'GET-GREEN-VALUE',
'GET-INDEX-BY-NAMESPACE-NAME',
'GET-INDEX-BY-QNAME',
'GET-INT64',
'GET-ITERATION',
'GET-KEY-VAL',
'GET-KEY-VALU',
- 'GET-KEY-VALUE',
+ 'GET-KEY-VALUE',
'GET-LAST',
'GET-LOCALNAME-BY-INDEX',
'GET-LONG',
@@ -945,7 +945,7 @@ OPENEDGEKEYWORDS = (
'GET-RED-VA',
'GET-RED-VAL',
'GET-RED-VALU',
- 'GET-RED-VALUE',
+ 'GET-RED-VALUE',
'GET-REPOSITIONED-ROW',
'GET-RGB-VALUE',
'GET-SELECTED',
@@ -955,7 +955,7 @@ OPENEDGEKEYWORDS = (
'GET-SELECTED-WID',
'GET-SELECTED-WIDG',
'GET-SELECTED-WIDGE',
- 'GET-SELECTED-WIDGET',
+ 'GET-SELECTED-WIDGET',
'GET-SHORT',
'GET-SIGNATURE',
'GET-SIZE',
@@ -967,26 +967,26 @@ OPENEDGEKEYWORDS = (
'GET-TEXT-HEIGHT-CH',
'GET-TEXT-HEIGHT-CHA',
'GET-TEXT-HEIGHT-CHAR',
- 'GET-TEXT-HEIGHT-CHARS',
+ 'GET-TEXT-HEIGHT-CHARS',
'GET-TEXT-HEIGHT-P',
'GET-TEXT-HEIGHT-PI',
'GET-TEXT-HEIGHT-PIX',
'GET-TEXT-HEIGHT-PIXE',
'GET-TEXT-HEIGHT-PIXEL',
- 'GET-TEXT-HEIGHT-PIXELS',
+ 'GET-TEXT-HEIGHT-PIXELS',
'GET-TEXT-WIDTH',
'GET-TEXT-WIDTH-',
'GET-TEXT-WIDTH-C',
'GET-TEXT-WIDTH-CH',
'GET-TEXT-WIDTH-CHA',
'GET-TEXT-WIDTH-CHAR',
- 'GET-TEXT-WIDTH-CHARS',
+ 'GET-TEXT-WIDTH-CHARS',
'GET-TEXT-WIDTH-P',
'GET-TEXT-WIDTH-PI',
'GET-TEXT-WIDTH-PIX',
'GET-TEXT-WIDTH-PIXE',
'GET-TEXT-WIDTH-PIXEL',
- 'GET-TEXT-WIDTH-PIXELS',
+ 'GET-TEXT-WIDTH-PIXELS',
'GET-TYPE-BY-INDEX',
'GET-TYPE-BY-NAMESPACE-NAME',
'GET-TYPE-BY-QNAME',
@@ -997,18 +997,18 @@ OPENEDGEKEYWORDS = (
'GET-VALUE-BY-NAMESPACE-NAME',
'GET-VALUE-BY-QNAME',
'GET-WAIT-STATE',
- 'GETBYTE',
+ 'GETBYTE',
'GLOBAL',
'GO-ON',
'GO-PEND',
'GO-PENDI',
'GO-PENDIN',
- 'GO-PENDING',
+ 'GO-PENDING',
'GRANT',
'GRAPHIC-E',
'GRAPHIC-ED',
'GRAPHIC-EDG',
- 'GRAPHIC-EDGE',
+ 'GRAPHIC-EDGE',
'GRID-FACTOR-H',
'GRID-FACTOR-HO',
'GRID-FACTOR-HOR',
@@ -1018,7 +1018,7 @@ OPENEDGEKEYWORDS = (
'GRID-FACTOR-HORIZON',
'GRID-FACTOR-HORIZONT',
'GRID-FACTOR-HORIZONTA',
- 'GRID-FACTOR-HORIZONTAL',
+ 'GRID-FACTOR-HORIZONTAL',
'GRID-FACTOR-V',
'GRID-FACTOR-VE',
'GRID-FACTOR-VER',
@@ -1026,38 +1026,38 @@ OPENEDGEKEYWORDS = (
'GRID-FACTOR-VERTI',
'GRID-FACTOR-VERTIC',
'GRID-FACTOR-VERTICA',
- 'GRID-FACTOR-VERTICAL',
+ 'GRID-FACTOR-VERTICAL',
'GRID-SNAP',
'GRID-UNIT-HEIGHT',
'GRID-UNIT-HEIGHT-',
'GRID-UNIT-HEIGHT-C',
'GRID-UNIT-HEIGHT-CH',
'GRID-UNIT-HEIGHT-CHA',
- 'GRID-UNIT-HEIGHT-CHARS',
+ 'GRID-UNIT-HEIGHT-CHARS',
'GRID-UNIT-HEIGHT-P',
'GRID-UNIT-HEIGHT-PI',
'GRID-UNIT-HEIGHT-PIX',
'GRID-UNIT-HEIGHT-PIXE',
'GRID-UNIT-HEIGHT-PIXEL',
- 'GRID-UNIT-HEIGHT-PIXELS',
+ 'GRID-UNIT-HEIGHT-PIXELS',
'GRID-UNIT-WIDTH',
'GRID-UNIT-WIDTH-',
'GRID-UNIT-WIDTH-C',
'GRID-UNIT-WIDTH-CH',
'GRID-UNIT-WIDTH-CHA',
'GRID-UNIT-WIDTH-CHAR',
- 'GRID-UNIT-WIDTH-CHARS',
+ 'GRID-UNIT-WIDTH-CHARS',
'GRID-UNIT-WIDTH-P',
'GRID-UNIT-WIDTH-PI',
'GRID-UNIT-WIDTH-PIX',
'GRID-UNIT-WIDTH-PIXE',
'GRID-UNIT-WIDTH-PIXEL',
- 'GRID-UNIT-WIDTH-PIXELS',
+ 'GRID-UNIT-WIDTH-PIXELS',
'GRID-VISIBLE',
'GROUP',
'GT',
'GUID',
- 'HANDLE',
+ 'HANDLE',
'HANDLER',
'HAS-RECORDS',
'HAVING',
@@ -1068,13 +1068,13 @@ OPENEDGEKEYWORDS = (
'HEIGHT-CH',
'HEIGHT-CHA',
'HEIGHT-CHAR',
- 'HEIGHT-CHARS',
+ 'HEIGHT-CHARS',
'HEIGHT-P',
'HEIGHT-PI',
'HEIGHT-PIX',
'HEIGHT-PIXE',
'HEIGHT-PIXEL',
- 'HEIGHT-PIXELS',
+ 'HEIGHT-PIXELS',
'HELP',
'HEX-DECODE',
'HEX-ENCODE',
@@ -1086,7 +1086,7 @@ OPENEDGEKEYWORDS = (
'HORIZON',
'HORIZONT',
'HORIZONTA',
- 'HORIZONTAL',
+ 'HORIZONTAL',
'HOST-BYTE-ORDER',
'HTML-CHARSET',
'HTML-END-OF-LINE',
@@ -1108,25 +1108,25 @@ OPENEDGEKEYWORDS = (
'IMAGE-SIZE-CH',
'IMAGE-SIZE-CHA',
'IMAGE-SIZE-CHAR',
- 'IMAGE-SIZE-CHARS',
+ 'IMAGE-SIZE-CHARS',
'IMAGE-SIZE-P',
'IMAGE-SIZE-PI',
'IMAGE-SIZE-PIX',
'IMAGE-SIZE-PIXE',
'IMAGE-SIZE-PIXEL',
- 'IMAGE-SIZE-PIXELS',
+ 'IMAGE-SIZE-PIXELS',
'IMAGE-UP',
'IMMEDIATE-DISPLAY',
'IMPLEMENTS',
'IMPORT',
'IMPORT-PRINCIPAL',
'IN',
- 'IN-HANDLE',
+ 'IN-HANDLE',
'INCREMENT-EXCLUSIVE-ID',
'INDEX',
'INDEX-HINT',
'INDEX-INFORMATION',
- 'INDEXED-REPOSITION',
+ 'INDEXED-REPOSITION',
'INDICATOR',
'INFO',
'INFOR',
@@ -1135,22 +1135,22 @@ OPENEDGEKEYWORDS = (
'INFORMAT',
'INFORMATI',
'INFORMATIO',
- 'INFORMATION',
+ 'INFORMATION',
'INHERIT-BGC',
'INHERIT-BGCO',
'INHERIT-BGCOL',
'INHERIT-BGCOLO',
- 'INHERIT-BGCOLOR',
+ 'INHERIT-BGCOLOR',
'INHERIT-FGC',
'INHERIT-FGCO',
'INHERIT-FGCOL',
'INHERIT-FGCOLO',
- 'INHERIT-FGCOLOR',
+ 'INHERIT-FGCOLOR',
'INHERITS',
'INIT',
'INITI',
'INITIA',
- 'INITIAL',
+ 'INITIAL',
'INITIAL-DIR',
'INITIAL-FILTER',
'INITIALIZE-DOCUMENT-TYPE',
@@ -1163,7 +1163,7 @@ OPENEDGEKEYWORDS = (
'INPUT-OUT',
'INPUT-OUTP',
'INPUT-OUTPU',
- 'INPUT-OUTPUT',
+ 'INPUT-OUTPUT',
'INPUT-VALUE',
'INSERT',
'INSERT-ATTRIBUTE',
@@ -1173,16 +1173,16 @@ OPENEDGEKEYWORDS = (
'INSERT-BACK',
'INSERT-BACKT',
'INSERT-BACKTA',
- 'INSERT-BACKTAB',
+ 'INSERT-BACKTAB',
'INSERT-FILE',
'INSERT-ROW',
'INSERT-STRING',
'INSERT-T',
'INSERT-TA',
- 'INSERT-TAB',
- 'INT64',
- 'INT',
- 'INTEGER',
+ 'INSERT-TAB',
+ 'INT64',
+ 'INT',
+ 'INTEGER',
'INTERFACE',
'INTERNAL-ENTRIES',
'INTO',
@@ -1194,16 +1194,16 @@ OPENEDGEKEYWORDS = (
'IS-ATTR-SP',
'IS-ATTR-SPA',
'IS-ATTR-SPAC',
- 'IS-ATTR-SPACE',
+ 'IS-ATTR-SPACE',
'IS-CLASS',
- 'IS-JSON',
+ 'IS-JSON',
'IS-LEAD-BYTE',
'IS-OPEN',
'IS-PARAMETER-SET',
- 'IS-PARTITIONED',
+ 'IS-PARTITIONED',
'IS-ROW-SELECTED',
'IS-SELECTED',
- 'IS-XML',
+ 'IS-XML',
'ITEM',
'ITEMS-PER-ROW',
'JOIN',
@@ -1216,24 +1216,24 @@ OPENEDGEKEYWORDS = (
'KEEP-FRAME-Z-OR',
'KEEP-FRAME-Z-ORD',
'KEEP-FRAME-Z-ORDE',
- 'KEEP-FRAME-Z-ORDER',
+ 'KEEP-FRAME-Z-ORDER',
'KEEP-MESSAGES',
'KEEP-SECURITY-CACHE',
'KEEP-TAB-ORDER',
'KEY',
- 'KEY-CODE',
- 'KEY-FUNC',
- 'KEY-FUNCT',
- 'KEY-FUNCTI',
- 'KEY-FUNCTIO',
- 'KEY-FUNCTION',
- 'KEY-LABEL',
+ 'KEY-CODE',
+ 'KEY-FUNC',
+ 'KEY-FUNCT',
+ 'KEY-FUNCTI',
+ 'KEY-FUNCTIO',
+ 'KEY-FUNCTION',
+ 'KEY-LABEL',
'KEYCODE',
'KEYFUNC',
'KEYFUNCT',
'KEYFUNCTI',
'KEYFUNCTIO',
- 'KEYFUNCTION',
+ 'KEYFUNCTION',
'KEYLABEL',
'KEYS',
'KEYWORD',
@@ -1243,27 +1243,27 @@ OPENEDGEKEYWORDS = (
'LABEL-BGCO',
'LABEL-BGCOL',
'LABEL-BGCOLO',
- 'LABEL-BGCOLOR',
+ 'LABEL-BGCOLOR',
'LABEL-DC',
'LABEL-DCO',
'LABEL-DCOL',
'LABEL-DCOLO',
- 'LABEL-DCOLOR',
+ 'LABEL-DCOLOR',
'LABEL-FGC',
'LABEL-FGCO',
'LABEL-FGCOL',
'LABEL-FGCOLO',
- 'LABEL-FGCOLOR',
+ 'LABEL-FGCOLOR',
'LABEL-FONT',
'LABEL-PFC',
'LABEL-PFCO',
'LABEL-PFCOL',
'LABEL-PFCOLO',
- 'LABEL-PFCOLOR',
+ 'LABEL-PFCOLOR',
'LABELS',
- 'LABELS-HAVE-COLONS',
+ 'LABELS-HAVE-COLONS',
'LANDSCAPE',
- 'LANGUAGE',
+ 'LANGUAGE',
'LANGUAGES',
'LARGE',
'LARGE-TO-SMALL',
@@ -1271,7 +1271,7 @@ OPENEDGEKEYWORDS = (
'LAST-ASYNCH-REQUEST',
'LAST-BATCH',
'LAST-CHILD',
- 'LAST-EVEN',
+ 'LAST-EVEN',
'LAST-EVENT',
'LAST-FORM',
'LAST-KEY',
@@ -1281,20 +1281,20 @@ OPENEDGEKEYWORDS = (
'LAST-PROCED',
'LAST-PROCEDU',
'LAST-PROCEDUR',
- 'LAST-PROCEDURE',
+ 'LAST-PROCEDURE',
'LAST-SERVER',
'LAST-TAB-I',
'LAST-TAB-IT',
'LAST-TAB-ITE',
- 'LAST-TAB-ITEM',
- 'LASTKEY',
+ 'LAST-TAB-ITEM',
+ 'LASTKEY',
'LC',
'LDBNAME',
'LE',
'LEAVE',
'LEFT-ALIGN',
'LEFT-ALIGNE',
- 'LEFT-ALIGNED',
+ 'LEFT-ALIGNED',
'LEFT-TRIM',
'LENGTH',
'LIBRARY',
@@ -1303,7 +1303,7 @@ OPENEDGEKEYWORDS = (
'LINE',
'LINE-COUNT',
'LINE-COUNTE',
- 'LINE-COUNTER',
+ 'LINE-COUNTER',
'LIST-EVENTS',
'LIST-ITEM-PAIRS',
'LIST-ITEMS',
@@ -1311,9 +1311,9 @@ OPENEDGEKEYWORDS = (
'LIST-QUERY-ATTRS',
'LIST-SET-ATTRS',
'LIST-WIDGETS',
- 'LISTI',
- 'LISTIN',
- 'LISTING',
+ 'LISTI',
+ 'LISTIN',
+ 'LISTING',
'LITERAL-QUESTION',
'LITTLE-ENDIAN',
'LOAD',
@@ -1329,27 +1329,27 @@ OPENEDGEKEYWORDS = (
'LOAD-MOUSE-POIN',
'LOAD-MOUSE-POINT',
'LOAD-MOUSE-POINTE',
- 'LOAD-MOUSE-POINTER',
+ 'LOAD-MOUSE-POINTER',
'LOAD-PICTURE',
'LOAD-SMALL-ICON',
'LOCAL-NAME',
- 'LOCAL-VERSION-INFO',
+ 'LOCAL-VERSION-INFO',
'LOCATOR-COLUMN-NUMBER',
'LOCATOR-LINE-NUMBER',
'LOCATOR-PUBLIC-ID',
'LOCATOR-SYSTEM-ID',
'LOCATOR-TYPE',
- 'LOCK-REGISTRATION',
+ 'LOCK-REGISTRATION',
'LOCKED',
'LOG',
'LOG-AUDIT-EVENT',
- 'LOG-MANAGER',
- 'LOGICAL',
+ 'LOG-MANAGER',
+ 'LOGICAL',
'LOGIN-EXPIRATION-TIMESTAMP',
'LOGIN-HOST',
'LOGIN-STATE',
'LOGOUT',
- 'LONGCHAR',
+ 'LONGCHAR',
'LOOKAHEAD',
'LOOKUP',
'LT',
@@ -1364,30 +1364,30 @@ OPENEDGEKEYWORDS = (
'MARGIN-HEIGHT-CH',
'MARGIN-HEIGHT-CHA',
'MARGIN-HEIGHT-CHAR',
- 'MARGIN-HEIGHT-CHARS',
+ 'MARGIN-HEIGHT-CHARS',
'MARGIN-HEIGHT-P',
'MARGIN-HEIGHT-PI',
'MARGIN-HEIGHT-PIX',
'MARGIN-HEIGHT-PIXE',
'MARGIN-HEIGHT-PIXEL',
- 'MARGIN-HEIGHT-PIXELS',
+ 'MARGIN-HEIGHT-PIXELS',
'MARGIN-WIDTH',
'MARGIN-WIDTH-',
'MARGIN-WIDTH-C',
'MARGIN-WIDTH-CH',
'MARGIN-WIDTH-CHA',
'MARGIN-WIDTH-CHAR',
- 'MARGIN-WIDTH-CHARS',
+ 'MARGIN-WIDTH-CHARS',
'MARGIN-WIDTH-P',
'MARGIN-WIDTH-PI',
'MARGIN-WIDTH-PIX',
'MARGIN-WIDTH-PIXE',
'MARGIN-WIDTH-PIXEL',
- 'MARGIN-WIDTH-PIXELS',
+ 'MARGIN-WIDTH-PIXELS',
'MARK-NEW',
'MARK-ROW-STATE',
'MATCHES',
- 'MAX',
+ 'MAX',
'MAX-BUTTON',
'MAX-CHARS',
'MAX-DATA-GUESS',
@@ -1396,37 +1396,37 @@ OPENEDGEKEYWORDS = (
'MAX-HEIGHT-CH',
'MAX-HEIGHT-CHA',
'MAX-HEIGHT-CHAR',
- 'MAX-HEIGHT-CHARS',
+ 'MAX-HEIGHT-CHARS',
'MAX-HEIGHT-P',
'MAX-HEIGHT-PI',
'MAX-HEIGHT-PIX',
'MAX-HEIGHT-PIXE',
'MAX-HEIGHT-PIXEL',
- 'MAX-HEIGHT-PIXELS',
+ 'MAX-HEIGHT-PIXELS',
'MAX-ROWS',
'MAX-SIZE',
'MAX-VAL',
'MAX-VALU',
- 'MAX-VALUE',
+ 'MAX-VALUE',
'MAX-WIDTH',
'MAX-WIDTH-',
'MAX-WIDTH-C',
'MAX-WIDTH-CH',
'MAX-WIDTH-CHA',
'MAX-WIDTH-CHAR',
- 'MAX-WIDTH-CHARS',
+ 'MAX-WIDTH-CHARS',
'MAX-WIDTH-P',
'MAX-WIDTH-PI',
'MAX-WIDTH-PIX',
'MAX-WIDTH-PIXE',
'MAX-WIDTH-PIXEL',
- 'MAX-WIDTH-PIXELS',
- 'MAXI',
- 'MAXIM',
- 'MAXIMIZE',
- 'MAXIMU',
- 'MAXIMUM',
- 'MAXIMUM-LEVEL',
+ 'MAX-WIDTH-PIXELS',
+ 'MAXI',
+ 'MAXIM',
+ 'MAXIMIZE',
+ 'MAXIMU',
+ 'MAXIMUM',
+ 'MAXIMUM-LEVEL',
'MD5-DIGEST',
'MEMBER',
'MEMPTR-TO-NODE-VALUE',
@@ -1435,71 +1435,71 @@ OPENEDGEKEYWORDS = (
'MENU-ITEM',
'MENU-K',
'MENU-KE',
- 'MENU-KEY',
+ 'MENU-KEY',
'MENU-M',
'MENU-MO',
'MENU-MOU',
'MENU-MOUS',
- 'MENU-MOUSE',
- 'MENUBAR',
+ 'MENU-MOUSE',
+ 'MENUBAR',
'MERGE-BY-FIELD',
'MESSAGE',
'MESSAGE-AREA',
'MESSAGE-AREA-FONT',
'MESSAGE-LINES',
'METHOD',
- 'MIN',
+ 'MIN',
'MIN-BUTTON',
'MIN-COLUMN-WIDTH-C',
'MIN-COLUMN-WIDTH-CH',
'MIN-COLUMN-WIDTH-CHA',
'MIN-COLUMN-WIDTH-CHAR',
- 'MIN-COLUMN-WIDTH-CHARS',
+ 'MIN-COLUMN-WIDTH-CHARS',
'MIN-COLUMN-WIDTH-P',
'MIN-COLUMN-WIDTH-PI',
'MIN-COLUMN-WIDTH-PIX',
'MIN-COLUMN-WIDTH-PIXE',
'MIN-COLUMN-WIDTH-PIXEL',
- 'MIN-COLUMN-WIDTH-PIXELS',
+ 'MIN-COLUMN-WIDTH-PIXELS',
'MIN-HEIGHT',
'MIN-HEIGHT-',
'MIN-HEIGHT-C',
'MIN-HEIGHT-CH',
'MIN-HEIGHT-CHA',
'MIN-HEIGHT-CHAR',
- 'MIN-HEIGHT-CHARS',
+ 'MIN-HEIGHT-CHARS',
'MIN-HEIGHT-P',
'MIN-HEIGHT-PI',
'MIN-HEIGHT-PIX',
'MIN-HEIGHT-PIXE',
'MIN-HEIGHT-PIXEL',
- 'MIN-HEIGHT-PIXELS',
+ 'MIN-HEIGHT-PIXELS',
'MIN-SIZE',
'MIN-VAL',
'MIN-VALU',
- 'MIN-VALUE',
+ 'MIN-VALUE',
'MIN-WIDTH',
'MIN-WIDTH-',
'MIN-WIDTH-C',
'MIN-WIDTH-CH',
'MIN-WIDTH-CHA',
'MIN-WIDTH-CHAR',
- 'MIN-WIDTH-CHARS',
+ 'MIN-WIDTH-CHARS',
'MIN-WIDTH-P',
'MIN-WIDTH-PI',
'MIN-WIDTH-PIX',
'MIN-WIDTH-PIXE',
'MIN-WIDTH-PIXEL',
- 'MIN-WIDTH-PIXELS',
- 'MINI',
- 'MINIM',
- 'MINIMU',
- 'MINIMUM',
- 'MOD',
+ 'MIN-WIDTH-PIXELS',
+ 'MINI',
+ 'MINIM',
+ 'MINIMU',
+ 'MINIMUM',
+ 'MOD',
'MODIFIED',
'MODU',
'MODUL',
- 'MODULO',
+ 'MODULO',
'MONTH',
'MOUSE',
'MOUSE-P',
@@ -1508,7 +1508,7 @@ OPENEDGEKEYWORDS = (
'MOUSE-POIN',
'MOUSE-POINT',
'MOUSE-POINTE',
- 'MOUSE-POINTER',
+ 'MOUSE-POINTER',
'MOVABLE',
'MOVE-AFTER',
'MOVE-AFTER-',
@@ -1519,7 +1519,7 @@ OPENEDGEKEYWORDS = (
'MOVE-AFTER-TAB-I',
'MOVE-AFTER-TAB-IT',
'MOVE-AFTER-TAB-ITE',
- 'MOVE-AFTER-TAB-ITEM',
+ 'MOVE-AFTER-TAB-ITEM',
'MOVE-BEFOR',
'MOVE-BEFORE',
'MOVE-BEFORE-',
@@ -1530,23 +1530,23 @@ OPENEDGEKEYWORDS = (
'MOVE-BEFORE-TAB-I',
'MOVE-BEFORE-TAB-IT',
'MOVE-BEFORE-TAB-ITE',
- 'MOVE-BEFORE-TAB-ITEM',
+ 'MOVE-BEFORE-TAB-ITEM',
'MOVE-COL',
'MOVE-COLU',
'MOVE-COLUM',
- 'MOVE-COLUMN',
+ 'MOVE-COLUMN',
'MOVE-TO-B',
'MOVE-TO-BO',
'MOVE-TO-BOT',
'MOVE-TO-BOTT',
'MOVE-TO-BOTTO',
- 'MOVE-TO-BOTTOM',
+ 'MOVE-TO-BOTTOM',
'MOVE-TO-EOF',
'MOVE-TO-T',
'MOVE-TO-TO',
- 'MOVE-TO-TOP',
+ 'MOVE-TO-TOP',
'MPE',
- 'MTIME',
+ 'MTIME',
'MULTI-COMPILE',
'MULTIPLE',
'MULTIPLE-KEY',
@@ -1570,7 +1570,7 @@ OPENEDGEKEYWORDS = (
'NEXT-TAB-I',
'NEXT-TAB-IT',
'NEXT-TAB-ITE',
- 'NEXT-TAB-ITEM',
+ 'NEXT-TAB-ITEM',
'NEXT-VALUE',
'NO',
'NO-APPLY',
@@ -1581,12 +1581,12 @@ OPENEDGEKEYWORDS = (
'NO-ATTR-L',
'NO-ATTR-LI',
'NO-ATTR-LIS',
- 'NO-ATTR-LIST',
+ 'NO-ATTR-LIST',
'NO-ATTR-S',
'NO-ATTR-SP',
'NO-ATTR-SPA',
'NO-ATTR-SPAC',
- 'NO-ATTR-SPACE',
+ 'NO-ATTR-SPACE',
'NO-AUTO-VALIDATE',
'NO-BIND-WHERE',
'NO-BOX',
@@ -1602,21 +1602,21 @@ OPENEDGEKEYWORDS = (
'NO-F',
'NO-FI',
'NO-FIL',
- 'NO-FILL',
+ 'NO-FILL',
'NO-FOCUS',
'NO-HELP',
'NO-HIDE',
'NO-INDEX-HINT',
'NO-INHERIT-BGC',
'NO-INHERIT-BGCO',
- 'NO-INHERIT-BGCOLOR',
+ 'NO-INHERIT-BGCOLOR',
'NO-INHERIT-FGC',
'NO-INHERIT-FGCO',
'NO-INHERIT-FGCOL',
'NO-INHERIT-FGCOLO',
- 'NO-INHERIT-FGCOLOR',
+ 'NO-INHERIT-FGCOLOR',
'NO-JOIN-BY-SQLDB',
- 'NO-LABE',
+ 'NO-LABE',
'NO-LABELS',
'NO-LOBS',
'NO-LOCK',
@@ -1626,12 +1626,12 @@ OPENEDGEKEYWORDS = (
'NO-MESS',
'NO-MESSA',
'NO-MESSAG',
- 'NO-MESSAGE',
+ 'NO-MESSAGE',
'NO-PAUSE',
'NO-PREFE',
'NO-PREFET',
'NO-PREFETC',
- 'NO-PREFETCH',
+ 'NO-PREFETCH',
'NO-ROW-MARKERS',
'NO-SCROLLBAR-VERTICAL',
'NO-SEPARATE-CONNECTION',
@@ -1643,40 +1643,40 @@ OPENEDGEKEYWORDS = (
'NO-UNDERL',
'NO-UNDERLI',
'NO-UNDERLIN',
- 'NO-UNDERLINE',
+ 'NO-UNDERLINE',
'NO-UNDO',
'NO-VAL',
'NO-VALI',
'NO-VALID',
'NO-VALIDA',
'NO-VALIDAT',
- 'NO-VALIDATE',
+ 'NO-VALIDATE',
'NO-WAIT',
'NO-WORD-WRAP',
- 'NODE-VALUE-TO-MEMPTR',
- 'NONAMESPACE-SCHEMA-LOCATION',
- 'NONE',
- 'NORMALIZE',
- 'NOT',
- 'NOT-ACTIVE',
- 'NOW',
+ 'NODE-VALUE-TO-MEMPTR',
+ 'NONAMESPACE-SCHEMA-LOCATION',
+ 'NONE',
+ 'NORMALIZE',
+ 'NOT',
+ 'NOT-ACTIVE',
+ 'NOW',
'NULL',
'NUM-ALI',
'NUM-ALIA',
'NUM-ALIAS',
'NUM-ALIASE',
- 'NUM-ALIASES',
+ 'NUM-ALIASES',
'NUM-BUFFERS',
'NUM-BUT',
'NUM-BUTT',
'NUM-BUTTO',
'NUM-BUTTON',
- 'NUM-BUTTONS',
+ 'NUM-BUTTONS',
'NUM-COL',
'NUM-COLU',
'NUM-COLUM',
'NUM-COLUMN',
- 'NUM-COLUMNS',
+ 'NUM-COLUMNS',
'NUM-COPIES',
'NUM-DBS',
'NUM-DROPPED-FILES',
@@ -1690,7 +1690,7 @@ OPENEDGEKEYWORDS = (
'NUM-LOCKED-COLU',
'NUM-LOCKED-COLUM',
'NUM-LOCKED-COLUMN',
- 'NUM-LOCKED-COLUMNS',
+ 'NUM-LOCKED-COLUMNS',
'NUM-MESSAGES',
'NUM-PARAMETERS',
'NUM-REFERENCES',
@@ -1698,24 +1698,24 @@ OPENEDGEKEYWORDS = (
'NUM-RESULTS',
'NUM-SELECTED',
'NUM-SELECTED-',
- 'NUM-SELECTED-ROWS',
+ 'NUM-SELECTED-ROWS',
'NUM-SELECTED-W',
'NUM-SELECTED-WI',
'NUM-SELECTED-WID',
'NUM-SELECTED-WIDG',
'NUM-SELECTED-WIDGE',
'NUM-SELECTED-WIDGET',
- 'NUM-SELECTED-WIDGETS',
+ 'NUM-SELECTED-WIDGETS',
'NUM-TABS',
'NUM-TO-RETAIN',
'NUM-VISIBLE-COLUMNS',
- 'NUMERIC',
- 'NUMERIC-F',
- 'NUMERIC-FO',
- 'NUMERIC-FOR',
- 'NUMERIC-FORM',
- 'NUMERIC-FORMA',
- 'NUMERIC-FORMAT',
+ 'NUMERIC',
+ 'NUMERIC-F',
+ 'NUMERIC-FO',
+ 'NUMERIC-FOR',
+ 'NUMERIC-FORM',
+ 'NUMERIC-FORMA',
+ 'NUMERIC-FORMAT',
'OCTET-LENGTH',
'OF',
'OFF',
@@ -1730,7 +1730,7 @@ OPENEDGEKEYWORDS = (
'ON-FRAME-BOR',
'ON-FRAME-BORD',
'ON-FRAME-BORDE',
- 'ON-FRAME-BORDER',
+ 'ON-FRAME-BORDER',
'OPEN',
'OPSYS',
'OPTION',
@@ -1743,7 +1743,7 @@ OPENEDGEKEYWORDS = (
'OS-CREATE-DIR',
'OS-DELETE',
'OS-DIR',
- 'OS-DRIVE',
+ 'OS-DRIVE',
'OS-DRIVES',
'OS-ERROR',
'OS-GETENV',
@@ -1757,22 +1757,22 @@ OPENEDGEKEYWORDS = (
'PAGE-BOT',
'PAGE-BOTT',
'PAGE-BOTTO',
- 'PAGE-BOTTOM',
+ 'PAGE-BOTTOM',
'PAGE-NUM',
'PAGE-NUMB',
'PAGE-NUMBE',
- 'PAGE-NUMBER',
+ 'PAGE-NUMBER',
'PAGE-SIZE',
'PAGE-TOP',
'PAGE-WID',
'PAGE-WIDT',
- 'PAGE-WIDTH',
- 'PAGED',
+ 'PAGE-WIDTH',
+ 'PAGED',
'PARAM',
'PARAME',
'PARAMET',
'PARAMETE',
- 'PARAMETER',
+ 'PARAMETER',
'PARENT',
'PARSE-STATUS',
'PARTIAL-KEY',
@@ -1786,54 +1786,54 @@ OPENEDGEKEYWORDS = (
'PBE-HASH-ALGORI',
'PBE-HASH-ALGORIT',
'PBE-HASH-ALGORITH',
- 'PBE-HASH-ALGORITHM',
+ 'PBE-HASH-ALGORITHM',
'PBE-KEY-ROUNDS',
'PDBNAME',
'PERSIST',
'PERSISTE',
'PERSISTEN',
- 'PERSISTENT',
+ 'PERSISTENT',
'PERSISTENT-CACHE-DISABLED',
'PFC',
'PFCO',
'PFCOL',
'PFCOLO',
- 'PFCOLOR',
+ 'PFCOLOR',
'PIXELS',
'PIXELS-PER-COL',
'PIXELS-PER-COLU',
'PIXELS-PER-COLUM',
- 'PIXELS-PER-COLUMN',
+ 'PIXELS-PER-COLUMN',
'PIXELS-PER-ROW',
'POPUP-M',
'POPUP-ME',
'POPUP-MEN',
- 'POPUP-MENU',
+ 'POPUP-MENU',
'POPUP-O',
'POPUP-ON',
'POPUP-ONL',
- 'POPUP-ONLY',
+ 'POPUP-ONLY',
'PORTRAIT',
'POSITION',
'PRECISION',
'PREFER-DATASET',
- 'PREPARE-STRING',
+ 'PREPARE-STRING',
'PREPARED',
'PREPROC',
'PREPROCE',
'PREPROCES',
- 'PREPROCESS',
+ 'PREPROCESS',
'PRESEL',
'PRESELE',
'PRESELEC',
- 'PRESELECT',
+ 'PRESELECT',
'PREV',
'PREV-COLUMN',
'PREV-SIBLING',
'PREV-TAB-I',
'PREV-TAB-IT',
'PREV-TAB-ITE',
- 'PREV-TAB-ITEM',
+ 'PREV-TAB-ITEM',
'PRIMARY',
'PRINTER',
'PRINTER-CONTROL-HANDLE',
@@ -1845,28 +1845,28 @@ OPENEDGEKEYWORDS = (
'PRIVATE-D',
'PRIVATE-DA',
'PRIVATE-DAT',
- 'PRIVATE-DATA',
+ 'PRIVATE-DATA',
'PRIVILEGES',
'PROC-HA',
'PROC-HAN',
'PROC-HAND',
'PROC-HANDL',
- 'PROC-HANDLE',
+ 'PROC-HANDLE',
'PROC-ST',
'PROC-STA',
'PROC-STAT',
'PROC-STATU',
- 'PROC-STATUS',
- 'PROC-TEXT',
- 'PROC-TEXT-BUFFER',
- 'PROCE',
- 'PROCED',
- 'PROCEDU',
- 'PROCEDUR',
- 'PROCEDURE',
- 'PROCEDURE-CALL-TYPE',
- 'PROCEDURE-TYPE',
- 'PROCESS',
+ 'PROC-STATUS',
+ 'PROC-TEXT',
+ 'PROC-TEXT-BUFFER',
+ 'PROCE',
+ 'PROCED',
+ 'PROCEDU',
+ 'PROCEDUR',
+ 'PROCEDURE',
+ 'PROCEDURE-CALL-TYPE',
+ 'PROCEDURE-TYPE',
+ 'PROCESS',
'PROFILER',
'PROGRAM-NAME',
'PROGRESS',
@@ -1875,11 +1875,11 @@ OPENEDGEKEYWORDS = (
'PROGRESS-SOU',
'PROGRESS-SOUR',
'PROGRESS-SOURC',
- 'PROGRESS-SOURCE',
+ 'PROGRESS-SOURCE',
'PROMPT',
'PROMPT-F',
'PROMPT-FO',
- 'PROMPT-FOR',
+ 'PROMPT-FOR',
'PROMSGS',
'PROPATH',
'PROPERTY',
@@ -1887,7 +1887,7 @@ OPENEDGEKEYWORDS = (
'PROVERS',
'PROVERSI',
'PROVERSIO',
- 'PROVERSION',
+ 'PROVERSION',
'PROXY',
'PROXY-PASSWORD',
'PROXY-USERID',
@@ -1902,12 +1902,12 @@ OPENEDGEKEYWORDS = (
'PUT-INT64',
'PUT-KEY-VAL',
'PUT-KEY-VALU',
- 'PUT-KEY-VALUE',
+ 'PUT-KEY-VALUE',
'PUT-LONG',
'PUT-SHORT',
'PUT-STRING',
'PUT-UNSIGNED-LONG',
- 'PUTBYTE',
+ 'PUTBYTE',
'QUERY',
'QUERY-CLOSE',
'QUERY-OFF-END',
@@ -1917,11 +1917,11 @@ OPENEDGEKEYWORDS = (
'QUESTION',
'QUIT',
'QUOTER',
- 'R-INDEX',
+ 'R-INDEX',
'RADIO-BUTTONS',
'RADIO-SET',
'RANDOM',
- 'RAW',
+ 'RAW',
'RAW-TRANSFER',
'RCODE-INFO',
'RCODE-INFOR',
@@ -1930,28 +1930,28 @@ OPENEDGEKEYWORDS = (
'RCODE-INFORMAT',
'RCODE-INFORMATI',
'RCODE-INFORMATIO',
- 'RCODE-INFORMATION',
+ 'RCODE-INFORMATION',
'READ-AVAILABLE',
'READ-EXACT-NUM',
'READ-FILE',
- 'READ-JSON',
+ 'READ-JSON',
'READ-ONLY',
'READ-XML',
'READ-XMLSCHEMA',
- 'READKEY',
+ 'READKEY',
'REAL',
- 'RECID',
+ 'RECID',
'RECORD-LENGTH',
'RECT',
'RECTA',
'RECTAN',
'RECTANG',
'RECTANGL',
- 'RECTANGLE',
+ 'RECTANGLE',
'RECURSIVE',
'REFERENCE-ONLY',
'REFRESH',
- 'REFRESH-AUDIT-POLICY',
+ 'REFRESH-AUDIT-POLICY',
'REFRESHABLE',
'REGISTER-DOMAIN',
'RELEASE',
@@ -1968,14 +1968,14 @@ OPENEDGEKEYWORDS = (
'REPOSITION-TO-ROW',
'REPOSITION-TO-ROWID',
'REQUEST',
- 'REQUEST-INFO',
+ 'REQUEST-INFO',
'RESET',
'RESIZA',
'RESIZAB',
'RESIZABL',
- 'RESIZABLE',
+ 'RESIZABLE',
'RESIZE',
- 'RESPONSE-INFO',
+ 'RESPONSE-INFO',
'RESTART-ROW',
'RESTART-ROWID',
'RETAIN',
@@ -1983,21 +1983,21 @@ OPENEDGEKEYWORDS = (
'RETRY',
'RETRY-CANCEL',
'RETURN',
- 'RETURN-ALIGN',
- 'RETURN-ALIGNE',
+ 'RETURN-ALIGN',
+ 'RETURN-ALIGNE',
'RETURN-INS',
'RETURN-INSE',
'RETURN-INSER',
'RETURN-INSERT',
'RETURN-INSERTE',
- 'RETURN-INSERTED',
- 'RETURN-TO-START-DI',
+ 'RETURN-INSERTED',
+ 'RETURN-TO-START-DI',
'RETURN-TO-START-DIR',
'RETURN-VAL',
'RETURN-VALU',
- 'RETURN-VALUE',
+ 'RETURN-VALUE',
'RETURN-VALUE-DATA-TYPE',
- 'RETURNS',
+ 'RETURNS',
'REVERSE-FROM',
'REVERT',
'REVOKE',
@@ -2013,17 +2013,17 @@ OPENEDGEKEYWORDS = (
'ROW-MARKERS',
'ROW-OF',
'ROW-RESIZABLE',
- 'ROWID',
+ 'ROWID',
'RULE',
'RUN',
'RUN-PROCEDURE',
- 'SAVE CACHE',
+ 'SAVE CACHE',
'SAVE',
'SAVE-AS',
'SAVE-FILE',
'SAX-COMPLE',
'SAX-COMPLET',
- 'SAX-COMPLETE',
+ 'SAX-COMPLETE',
'SAX-PARSE',
'SAX-PARSE-FIRST',
'SAX-PARSE-NEXT',
@@ -2036,7 +2036,7 @@ OPENEDGEKEYWORDS = (
'SAX-WRITE-ELEMENT',
'SAX-WRITE-ERROR',
'SAX-WRITE-IDLE',
- 'SAX-WRITE-TAG',
+ 'SAX-WRITE-TAG',
'SAX-WRITER',
'SCHEMA',
'SCHEMA-LOCATION',
@@ -2047,17 +2047,17 @@ OPENEDGEKEYWORDS = (
'SCREEN-LINES',
'SCREEN-VAL',
'SCREEN-VALU',
- 'SCREEN-VALUE',
+ 'SCREEN-VALUE',
'SCROLL',
- 'SCROLL-BARS',
- 'SCROLL-DELTA',
- 'SCROLL-OFFSET',
- 'SCROLL-TO-CURRENT-ROW',
- 'SCROLL-TO-I',
- 'SCROLL-TO-IT',
- 'SCROLL-TO-ITE',
- 'SCROLL-TO-ITEM',
- 'SCROLL-TO-SELECTED-ROW',
+ 'SCROLL-BARS',
+ 'SCROLL-DELTA',
+ 'SCROLL-OFFSET',
+ 'SCROLL-TO-CURRENT-ROW',
+ 'SCROLL-TO-I',
+ 'SCROLL-TO-IT',
+ 'SCROLL-TO-ITE',
+ 'SCROLL-TO-ITEM',
+ 'SCROLL-TO-SELECTED-ROW',
'SCROLLABLE',
'SCROLLBAR-H',
'SCROLLBAR-HO',
@@ -2068,7 +2068,7 @@ OPENEDGEKEYWORDS = (
'SCROLLBAR-HORIZON',
'SCROLLBAR-HORIZONT',
'SCROLLBAR-HORIZONTA',
- 'SCROLLBAR-HORIZONTAL',
+ 'SCROLLBAR-HORIZONTAL',
'SCROLLBAR-V',
'SCROLLBAR-VE',
'SCROLLBAR-VER',
@@ -2076,13 +2076,13 @@ OPENEDGEKEYWORDS = (
'SCROLLBAR-VERTI',
'SCROLLBAR-VERTIC',
'SCROLLBAR-VERTICA',
- 'SCROLLBAR-VERTICAL',
+ 'SCROLLBAR-VERTICAL',
'SCROLLED-ROW-POS',
'SCROLLED-ROW-POSI',
'SCROLLED-ROW-POSIT',
'SCROLLED-ROW-POSITI',
'SCROLLED-ROW-POSITIO',
- 'SCROLLED-ROW-POSITION',
+ 'SCROLLED-ROW-POSITION',
'SCROLLING',
'SDBNAME',
'SEAL',
@@ -2094,11 +2094,11 @@ OPENEDGEKEYWORDS = (
'SECURITY-POLICY',
'SEEK',
'SELECT',
- 'SELECT-ALL',
- 'SELECT-FOCUSED-ROW',
- 'SELECT-NEXT-ROW',
- 'SELECT-PREV-ROW',
- 'SELECT-ROW',
+ 'SELECT-ALL',
+ 'SELECT-FOCUSED-ROW',
+ 'SELECT-NEXT-ROW',
+ 'SELECT-PREV-ROW',
+ 'SELECT-ROW',
'SELECTABLE',
'SELECTED',
'SELECTION',
@@ -2108,14 +2108,14 @@ OPENEDGEKEYWORDS = (
'SELECTION-TEXT',
'SELF',
'SEND',
- 'SEND-SQL-STATEMENT',
+ 'SEND-SQL-STATEMENT',
'SENSITIVE',
'SEPARATE-CONNECTION',
'SEPARATOR-FGCOLOR',
'SEPARATORS',
- 'SERIALIZABLE',
- 'SERIALIZE-HIDDEN',
- 'SERIALIZE-NAME',
+ 'SERIALIZABLE',
+ 'SERIALIZE-HIDDEN',
+ 'SERIALIZE-NAME',
'SERVER',
'SERVER-CONNECTION-BOUND',
'SERVER-CONNECTION-BOUND-REQUEST',
@@ -2134,7 +2134,7 @@ OPENEDGEKEYWORDS = (
'SET-BLUE-VA',
'SET-BLUE-VAL',
'SET-BLUE-VALU',
- 'SET-BLUE-VALUE',
+ 'SET-BLUE-VALUE',
'SET-BREAK',
'SET-BUFFERS',
'SET-CALLBACK',
@@ -2151,7 +2151,7 @@ OPENEDGEKEYWORDS = (
'SET-GREEN-VA',
'SET-GREEN-VAL',
'SET-GREEN-VALU',
- 'SET-GREEN-VALUE',
+ 'SET-GREEN-VALUE',
'SET-INPUT-SOURCE',
'SET-OPTION',
'SET-OUTPUT-DESTINATION',
@@ -2164,61 +2164,61 @@ OPENEDGEKEYWORDS = (
'SET-RED-VA',
'SET-RED-VAL',
'SET-RED-VALU',
- 'SET-RED-VALUE',
+ 'SET-RED-VALUE',
'SET-REPOSITIONED-ROW',
'SET-RGB-VALUE',
'SET-ROLLBACK',
'SET-SELECTION',
'SET-SIZE',
'SET-SORT-ARROW',
- 'SET-WAIT-STATE',
+ 'SET-WAIT-STATE',
'SETUSER',
'SETUSERI',
- 'SETUSERID',
+ 'SETUSERID',
'SHA1-DIGEST',
'SHARE',
'SHARE-',
'SHARE-L',
'SHARE-LO',
'SHARE-LOC',
- 'SHARE-LOCK',
- 'SHARED',
+ 'SHARE-LOCK',
+ 'SHARED',
'SHOW-IN-TASKBAR',
- 'SHOW-STAT',
+ 'SHOW-STAT',
'SHOW-STATS',
- 'SIDE-LAB',
- 'SIDE-LABE',
- 'SIDE-LABEL',
+ 'SIDE-LAB',
+ 'SIDE-LABE',
+ 'SIDE-LABEL',
'SIDE-LABEL-H',
'SIDE-LABEL-HA',
'SIDE-LABEL-HAN',
'SIDE-LABEL-HAND',
'SIDE-LABEL-HANDL',
- 'SIDE-LABEL-HANDLE',
+ 'SIDE-LABEL-HANDLE',
'SIDE-LABELS',
- 'SIGNATURE',
+ 'SIGNATURE',
'SILENT',
'SIMPLE',
'SINGLE',
- 'SINGLE-RUN',
- 'SINGLETON',
+ 'SINGLE-RUN',
+ 'SINGLETON',
'SIZE',
'SIZE-C',
'SIZE-CH',
'SIZE-CHA',
'SIZE-CHAR',
- 'SIZE-CHARS',
+ 'SIZE-CHARS',
'SIZE-P',
'SIZE-PI',
'SIZE-PIX',
'SIZE-PIXE',
'SIZE-PIXEL',
- 'SIZE-PIXELS',
+ 'SIZE-PIXELS',
'SKIP',
'SKIP-DELETED-RECORD',
'SLIDER',
'SMALL-ICON',
- 'SMALL-TITLE',
+ 'SMALL-TITLE',
'SMALLINT',
'SOME',
'SORT',
@@ -2244,22 +2244,22 @@ OPENEDGEKEYWORDS = (
'STATUS-AREA-FONT',
'STDCALL',
'STOP',
- 'STOP-AFTER',
+ 'STOP-AFTER',
'STOP-PARSING',
- 'STOPPE',
+ 'STOPPE',
'STOPPED',
'STORED-PROC',
'STORED-PROCE',
'STORED-PROCED',
'STORED-PROCEDU',
'STORED-PROCEDUR',
- 'STORED-PROCEDURE',
+ 'STORED-PROCEDURE',
'STREAM',
'STREAM-HANDLE',
'STREAM-IO',
'STRETCH-TO-FIT',
'STRICT',
- 'STRICT-ENTITY-RESOLUTION',
+ 'STRICT-ENTITY-RESOLUTION',
'STRING',
'STRING-VALUE',
'STRING-XREF',
@@ -2267,30 +2267,30 @@ OPENEDGEKEYWORDS = (
'SUB-AVER',
'SUB-AVERA',
'SUB-AVERAG',
- 'SUB-AVERAGE',
+ 'SUB-AVERAGE',
'SUB-COUNT',
'SUB-MAXIMUM',
'SUB-MENU',
'SUB-MIN',
- 'SUB-MINIMUM',
- 'SUB-TOTAL',
+ 'SUB-MINIMUM',
+ 'SUB-TOTAL',
'SUBSCRIBE',
'SUBST',
'SUBSTI',
'SUBSTIT',
'SUBSTITU',
'SUBSTITUT',
- 'SUBSTITUTE',
+ 'SUBSTITUTE',
'SUBSTR',
'SUBSTRI',
'SUBSTRIN',
- 'SUBSTRING',
+ 'SUBSTRING',
'SUBTYPE',
'SUM',
- 'SUM-MAX',
- 'SUM-MAXI',
- 'SUM-MAXIM',
- 'SUM-MAXIMU',
+ 'SUM-MAX',
+ 'SUM-MAXI',
+ 'SUM-MAXIM',
+ 'SUM-MAXIMU',
'SUPER',
'SUPER-PROCEDURES',
'SUPPRESS-NAMESPACE-PROCESSING',
@@ -2301,7 +2301,7 @@ OPENEDGEKEYWORDS = (
'SUPPRESS-WARNI',
'SUPPRESS-WARNIN',
'SUPPRESS-WARNING',
- 'SUPPRESS-WARNINGS',
+ 'SUPPRESS-WARNINGS',
'SYMMETRIC-ENCRYPTION-ALGORITHM',
'SYMMETRIC-ENCRYPTION-IV',
'SYMMETRIC-ENCRYPTION-KEY',
@@ -2312,16 +2312,16 @@ OPENEDGEKEYWORDS = (
'SYSTEM-ALERT-BO',
'SYSTEM-ALERT-BOX',
'SYSTEM-ALERT-BOXE',
- 'SYSTEM-ALERT-BOXES',
+ 'SYSTEM-ALERT-BOXES',
'SYSTEM-DIALOG',
'SYSTEM-HELP',
'SYSTEM-ID',
- 'TAB-POSITION',
- 'TAB-STOP',
+ 'TAB-POSITION',
+ 'TAB-STOP',
'TABLE',
'TABLE-HANDLE',
'TABLE-NUMBER',
- 'TABLE-SCAN',
+ 'TABLE-SCAN',
'TARGET',
'TARGET-PROCEDURE',
'TEMP-DIR',
@@ -2330,14 +2330,14 @@ OPENEDGEKEYWORDS = (
'TEMP-DIRECT',
'TEMP-DIRECTO',
'TEMP-DIRECTOR',
- 'TEMP-DIRECTORY',
+ 'TEMP-DIRECTORY',
'TEMP-TABLE',
'TEMP-TABLE-PREPARE',
'TERM',
'TERMI',
'TERMIN',
'TERMINA',
- 'TERMINAL',
+ 'TERMINAL',
'TERMINATE',
'TEXT',
'TEXT-CURSOR',
@@ -2346,9 +2346,9 @@ OPENEDGEKEYWORDS = (
'THEN',
'THIS-OBJECT',
'THIS-PROCEDURE',
- 'THREAD-SAFE',
+ 'THREAD-SAFE',
'THREE-D',
- 'THROUGH',
+ 'THROUGH',
'THROW',
'THRU',
'TIC-MARKS',
@@ -2359,33 +2359,33 @@ OPENEDGEKEYWORDS = (
'TITLE-BGCO',
'TITLE-BGCOL',
'TITLE-BGCOLO',
- 'TITLE-BGCOLOR',
+ 'TITLE-BGCOLOR',
'TITLE-DC',
'TITLE-DCO',
'TITLE-DCOL',
'TITLE-DCOLO',
- 'TITLE-DCOLOR',
+ 'TITLE-DCOLOR',
'TITLE-FGC',
'TITLE-FGCO',
'TITLE-FGCOL',
'TITLE-FGCOLO',
- 'TITLE-FGCOLOR',
+ 'TITLE-FGCOLOR',
'TITLE-FO',
'TITLE-FON',
- 'TITLE-FONT',
+ 'TITLE-FONT',
'TO',
- 'TO-ROWID',
+ 'TO-ROWID',
'TODAY',
'TOGGLE-BOX',
'TOOLTIP',
'TOOLTIPS',
'TOP-NAV-QUERY',
'TOP-ONLY',
- 'TOPIC',
+ 'TOPIC',
'TOTAL',
'TRAILING',
'TRANS',
- 'TRANS-INIT-PROCEDURE',
+ 'TRANS-INIT-PROCEDURE',
'TRANSACTION',
'TRANSACTION-MODE',
'TRANSPARENT',
@@ -2396,7 +2396,7 @@ OPENEDGEKEYWORDS = (
'TRUNC',
'TRUNCA',
'TRUNCAT',
- 'TRUNCATE',
+ 'TRUNCATE',
'TYPE',
'TYPE-OF',
'UNBOX',
@@ -2404,18 +2404,18 @@ OPENEDGEKEYWORDS = (
'UNBUFFE',
'UNBUFFER',
'UNBUFFERE',
- 'UNBUFFERED',
+ 'UNBUFFERED',
'UNDERL',
'UNDERLI',
'UNDERLIN',
- 'UNDERLINE',
+ 'UNDERLINE',
'UNDO',
'UNFORM',
'UNFORMA',
'UNFORMAT',
'UNFORMATT',
'UNFORMATTE',
- 'UNFORMATTED',
+ 'UNFORMATTED',
'UNION',
'UNIQUE',
'UNIQUE-ID',
@@ -2441,15 +2441,15 @@ OPENEDGEKEYWORDS = (
'USE-TEXT',
'USE-UNDERLINE',
'USE-WIDGET-POOL',
- 'USER',
- 'USER-ID',
- 'USERID',
+ 'USER',
+ 'USER-ID',
+ 'USERID',
'USING',
'V6DISPLAY',
'V6FRAME',
- 'VALID-EVENT',
- 'VALID-HANDLE',
- 'VALID-OBJECT',
+ 'VALID-EVENT',
+ 'VALID-HANDLE',
+ 'VALID-OBJECT',
'VALIDATE',
'VALIDATE-EXPRESSION',
'VALIDATE-MESSAGE',
@@ -2463,14 +2463,14 @@ OPENEDGEKEYWORDS = (
'VARIA',
'VARIAB',
'VARIABL',
- 'VARIABLE',
+ 'VARIABLE',
'VERBOSE',
'VERSION',
'VERT',
'VERTI',
'VERTIC',
'VERTICA',
- 'VERTICAL',
+ 'VERTICAL',
'VIEW',
'VIEW-AS',
'VIEW-FIRST-COLUMN-ON-REOPEN',
@@ -2480,26 +2480,26 @@ OPENEDGEKEYWORDS = (
'VIRTUAL-HEIGHT-CH',
'VIRTUAL-HEIGHT-CHA',
'VIRTUAL-HEIGHT-CHAR',
- 'VIRTUAL-HEIGHT-CHARS',
+ 'VIRTUAL-HEIGHT-CHARS',
'VIRTUAL-HEIGHT-P',
'VIRTUAL-HEIGHT-PI',
'VIRTUAL-HEIGHT-PIX',
'VIRTUAL-HEIGHT-PIXE',
'VIRTUAL-HEIGHT-PIXEL',
- 'VIRTUAL-HEIGHT-PIXELS',
+ 'VIRTUAL-HEIGHT-PIXELS',
'VIRTUAL-WIDTH',
'VIRTUAL-WIDTH-',
'VIRTUAL-WIDTH-C',
'VIRTUAL-WIDTH-CH',
'VIRTUAL-WIDTH-CHA',
'VIRTUAL-WIDTH-CHAR',
- 'VIRTUAL-WIDTH-CHARS',
+ 'VIRTUAL-WIDTH-CHARS',
'VIRTUAL-WIDTH-P',
'VIRTUAL-WIDTH-PI',
'VIRTUAL-WIDTH-PIX',
'VIRTUAL-WIDTH-PIXE',
'VIRTUAL-WIDTH-PIXEL',
- 'VIRTUAL-WIDTH-PIXELS',
+ 'VIRTUAL-WIDTH-PIXELS',
'VISIBLE',
'VOID',
'WAIT',
@@ -2515,13 +2515,13 @@ OPENEDGEKEYWORDS = (
'WIDGET-EN',
'WIDGET-ENT',
'WIDGET-ENTE',
- 'WIDGET-ENTER',
+ 'WIDGET-ENTER',
'WIDGET-ID',
'WIDGET-L',
'WIDGET-LE',
'WIDGET-LEA',
'WIDGET-LEAV',
- 'WIDGET-LEAVE',
+ 'WIDGET-LEAVE',
'WIDGET-POOL',
'WIDTH',
'WIDTH-',
@@ -2529,29 +2529,29 @@ OPENEDGEKEYWORDS = (
'WIDTH-CH',
'WIDTH-CHA',
'WIDTH-CHAR',
- 'WIDTH-CHARS',
+ 'WIDTH-CHARS',
'WIDTH-P',
'WIDTH-PI',
'WIDTH-PIX',
'WIDTH-PIXE',
'WIDTH-PIXEL',
- 'WIDTH-PIXELS',
+ 'WIDTH-PIXELS',
'WINDOW',
'WINDOW-MAXIM',
'WINDOW-MAXIMI',
'WINDOW-MAXIMIZ',
'WINDOW-MAXIMIZE',
- 'WINDOW-MAXIMIZED',
+ 'WINDOW-MAXIMIZED',
'WINDOW-MINIM',
'WINDOW-MINIMI',
'WINDOW-MINIMIZ',
'WINDOW-MINIMIZE',
- 'WINDOW-MINIMIZED',
+ 'WINDOW-MINIMIZED',
'WINDOW-NAME',
'WINDOW-NORMAL',
'WINDOW-STA',
'WINDOW-STAT',
- 'WINDOW-STATE',
+ 'WINDOW-STATE',
'WINDOW-SYSTEM',
'WITH',
'WORD-INDEX',
@@ -2562,8 +2562,8 @@ OPENEDGEKEYWORDS = (
'WORK-AREA-Y',
'WORK-TAB',
'WORK-TABL',
- 'WORK-TABLE',
- 'WORKFILE',
+ 'WORK-TABLE',
+ 'WORKFILE',
'WRITE',
'WRITE-CDATA',
'WRITE-CHARACTERS',
@@ -2573,28 +2573,28 @@ OPENEDGEKEYWORDS = (
'WRITE-ENTITY-REF',
'WRITE-EXTERNAL-DTD',
'WRITE-FRAGMENT',
- 'WRITE-JSON',
+ 'WRITE-JSON',
'WRITE-MESSAGE',
'WRITE-PROCESSING-INSTRUCTION',
'WRITE-STATUS',
'WRITE-XML',
'WRITE-XMLSCHEMA',
'X',
- 'X-OF',
+ 'X-OF',
'XCODE',
'XML-DATA-TYPE',
- 'XML-ENTITY-EXPANSION-LIMIT',
+ 'XML-ENTITY-EXPANSION-LIMIT',
'XML-NODE-TYPE',
'XML-SCHEMA-PATH',
- 'XML-STRICT-ENTITY-RESOLUTION',
+ 'XML-STRICT-ENTITY-RESOLUTION',
'XML-SUPPRESS-NAMESPACE-PROCESSING',
'XREF',
'XREF-XML',
'Y',
- 'Y-OF',
+ 'Y-OF',
'YEAR',
'YEAR-OFFSET',
'YES',
'YES-NO',
- 'YES-NO-CANCEL'
+ 'YES-NO-CANCEL'
)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
index 168cb4460b..391de6562d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
@@ -11,7 +11,7 @@
internet connection. don't run that at home, use
a server ;-)
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -4726,7 +4726,7 @@ if __name__ == '__main__': # pragma: no cover
download = urlretrieve(PHP_MANUAL_URL)
with tarfile.open(download[0]) as tar:
tar.extractall()
- yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB))
+ yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB))
os.remove(download[0])
def regenerate(filename, modules):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
index 93bc42059a..990b11d409 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
@@ -4,7 +4,7 @@
Self-updating data files for PostgreSQL lexer.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,7 +35,7 @@ KEYWORDS = (
'ASSIGNMENT',
'ASYMMETRIC',
'AT',
- 'ATTACH',
+ 'ATTACH',
'ATTRIBUTE',
'AUTHORIZATION',
'BACKWARD',
@@ -49,7 +49,7 @@ KEYWORDS = (
'BOTH',
'BY',
'CACHE',
- 'CALL',
+ 'CALL',
'CALLED',
'CASCADE',
'CASCADED',
@@ -69,14 +69,14 @@ KEYWORDS = (
'COLLATE',
'COLLATION',
'COLUMN',
- 'COLUMNS',
+ 'COLUMNS',
'COMMENT',
'COMMENTS',
'COMMIT',
'COMMITTED',
'CONCURRENTLY',
'CONFIGURATION',
- 'CONFLICT',
+ 'CONFLICT',
'CONNECTION',
'CONSTRAINT',
'CONSTRAINTS',
@@ -88,7 +88,7 @@ KEYWORDS = (
'CREATE',
'CROSS',
'CSV',
- 'CUBE',
+ 'CUBE',
'CURRENT',
'CURRENT_CATALOG',
'CURRENT_DATE',
@@ -114,9 +114,9 @@ KEYWORDS = (
'DELETE',
'DELIMITER',
'DELIMITERS',
- 'DEPENDS',
+ 'DEPENDS',
'DESC',
- 'DETACH',
+ 'DETACH',
'DICTIONARY',
'DISABLE',
'DISCARD',
@@ -142,7 +142,7 @@ KEYWORDS = (
'EXECUTE',
'EXISTS',
'EXPLAIN',
- 'EXPRESSION',
+ 'EXPRESSION',
'EXTENSION',
'EXTERNAL',
'EXTRACT',
@@ -162,14 +162,14 @@ KEYWORDS = (
'FULL',
'FUNCTION',
'FUNCTIONS',
- 'GENERATED',
+ 'GENERATED',
'GLOBAL',
'GRANT',
'GRANTED',
'GREATEST',
'GROUP',
- 'GROUPING',
- 'GROUPS',
+ 'GROUPING',
+ 'GROUPS',
'HANDLER',
'HAVING',
'HEADER',
@@ -181,9 +181,9 @@ KEYWORDS = (
'IMMEDIATE',
'IMMUTABLE',
'IMPLICIT',
- 'IMPORT',
+ 'IMPORT',
'IN',
- 'INCLUDE',
+ 'INCLUDE',
'INCLUDING',
'INCREMENT',
'INDEX',
@@ -228,13 +228,13 @@ KEYWORDS = (
'LOCALTIMESTAMP',
'LOCATION',
'LOCK',
- 'LOCKED',
- 'LOGGED',
+ 'LOCKED',
+ 'LOGGED',
'MAPPING',
'MATCH',
'MATERIALIZED',
'MAXVALUE',
- 'METHOD',
+ 'METHOD',
'MINUTE',
'MINVALUE',
'MODE',
@@ -245,16 +245,16 @@ KEYWORDS = (
'NATIONAL',
'NATURAL',
'NCHAR',
- 'NEW',
+ 'NEW',
'NEXT',
- 'NFC',
- 'NFD',
- 'NFKC',
- 'NFKD',
+ 'NFC',
+ 'NFD',
+ 'NFKC',
+ 'NFKD',
'NO',
'NONE',
- 'NORMALIZE',
- 'NORMALIZED',
+ 'NORMALIZE',
+ 'NORMALIZED',
'NOT',
'NOTHING',
'NOTIFY',
@@ -269,7 +269,7 @@ KEYWORDS = (
'OFF',
'OFFSET',
'OIDS',
- 'OLD',
+ 'OLD',
'ON',
'ONLY',
'OPERATOR',
@@ -278,16 +278,16 @@ KEYWORDS = (
'OR',
'ORDER',
'ORDINALITY',
- 'OTHERS',
+ 'OTHERS',
'OUT',
'OUTER',
'OVER',
'OVERLAPS',
'OVERLAY',
- 'OVERRIDING',
+ 'OVERRIDING',
'OWNED',
'OWNER',
- 'PARALLEL',
+ 'PARALLEL',
'PARSER',
'PARTIAL',
'PARTITION',
@@ -307,9 +307,9 @@ KEYWORDS = (
'PRIVILEGES',
'PROCEDURAL',
'PROCEDURE',
- 'PROCEDURES',
+ 'PROCEDURES',
'PROGRAM',
- 'PUBLICATION',
+ 'PUBLICATION',
'QUOTE',
'RANGE',
'READ',
@@ -319,7 +319,7 @@ KEYWORDS = (
'RECURSIVE',
'REF',
'REFERENCES',
- 'REFERENCING',
+ 'REFERENCING',
'REFRESH',
'REINDEX',
'RELATIVE',
@@ -337,15 +337,15 @@ KEYWORDS = (
'RIGHT',
'ROLE',
'ROLLBACK',
- 'ROLLUP',
- 'ROUTINE',
- 'ROUTINES',
+ 'ROLLUP',
+ 'ROUTINE',
+ 'ROUTINES',
'ROW',
'ROWS',
'RULE',
'SAVEPOINT',
'SCHEMA',
- 'SCHEMAS',
+ 'SCHEMAS',
'SCROLL',
'SEARCH',
'SECOND',
@@ -359,16 +359,16 @@ KEYWORDS = (
'SESSION_USER',
'SET',
'SETOF',
- 'SETS',
+ 'SETS',
'SHARE',
'SHOW',
'SIMILAR',
'SIMPLE',
- 'SKIP',
+ 'SKIP',
'SMALLINT',
'SNAPSHOT',
'SOME',
- 'SQL',
+ 'SQL',
'STABLE',
'STANDALONE',
'START',
@@ -377,31 +377,31 @@ KEYWORDS = (
'STDIN',
'STDOUT',
'STORAGE',
- 'STORED',
+ 'STORED',
'STRICT',
'STRIP',
- 'SUBSCRIPTION',
+ 'SUBSCRIPTION',
'SUBSTRING',
- 'SUPPORT',
+ 'SUPPORT',
'SYMMETRIC',
'SYSID',
'SYSTEM',
'TABLE',
'TABLES',
- 'TABLESAMPLE',
+ 'TABLESAMPLE',
'TABLESPACE',
'TEMP',
'TEMPLATE',
'TEMPORARY',
'TEXT',
'THEN',
- 'TIES',
+ 'TIES',
'TIME',
'TIMESTAMP',
'TO',
'TRAILING',
'TRANSACTION',
- 'TRANSFORM',
+ 'TRANSFORM',
'TREAT',
'TRIGGER',
'TRIM',
@@ -410,7 +410,7 @@ KEYWORDS = (
'TRUSTED',
'TYPE',
'TYPES',
- 'UESCAPE',
+ 'UESCAPE',
'UNBOUNDED',
'UNCOMMITTED',
'UNENCRYPTED',
@@ -453,12 +453,12 @@ KEYWORDS = (
'XMLELEMENT',
'XMLEXISTS',
'XMLFOREST',
- 'XMLNAMESPACES',
+ 'XMLNAMESPACES',
'XMLPARSE',
'XMLPI',
'XMLROOT',
'XMLSERIALIZE',
- 'XMLTABLE',
+ 'XMLTABLE',
'YEAR',
'YES',
'ZONE',
@@ -495,12 +495,12 @@ DATATYPES = (
'line',
'lseg',
'macaddr',
- 'macaddr8',
+ 'macaddr8',
'money',
'numeric',
'path',
'pg_lsn',
- 'pg_snapshot',
+ 'pg_snapshot',
'point',
'polygon',
'real',
@@ -528,27 +528,27 @@ DATATYPES = (
PSEUDO_TYPES = (
'any',
- 'anyarray',
- 'anycompatible',
- 'anycompatiblearray',
- 'anycompatiblenonarray',
- 'anycompatiblerange',
+ 'anyarray',
+ 'anycompatible',
+ 'anycompatiblearray',
+ 'anycompatiblenonarray',
+ 'anycompatiblerange',
'anyelement',
- 'anyenum',
+ 'anyenum',
'anynonarray',
'anyrange',
'cstring',
- 'event_trigger',
- 'fdw_handler',
- 'index_am_handler',
+ 'event_trigger',
+ 'fdw_handler',
+ 'index_am_handler',
'internal',
'language_handler',
- 'pg_ddl_command',
+ 'pg_ddl_command',
'record',
- 'table_am_handler',
+ 'table_am_handler',
'trigger',
- 'tsm_handler',
- 'unknown',
+ 'tsm_handler',
+ 'unknown',
'void',
)
@@ -573,26 +573,26 @@ if __name__ == '__main__': # pragma: no cover
# One man's constant is another man's variable.
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
- KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h'
+ KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h'
DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
def update_myself():
- content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore')
- data_file = list(content.splitlines())
+ content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore')
+ data_file = list(content.splitlines())
datatypes = parse_datatypes(data_file)
pseudos = parse_pseudos(data_file)
- content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore')
- keywords = parse_keywords(content)
-
+ content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore')
+ keywords = parse_keywords(content)
+
update_consts(__file__, 'DATATYPES', datatypes)
update_consts(__file__, 'PSEUDO_TYPES', pseudos)
update_consts(__file__, 'KEYWORDS', keywords)
def parse_keywords(f):
kw = []
- for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f):
- kw.append(m.group(1).upper())
+ for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f):
+ kw.append(m.group(1).upper())
if not kw:
raise ValueError('no keyword found')
@@ -631,7 +631,7 @@ if __name__ == '__main__': # pragma: no cover
def parse_pseudos(f):
dt = []
re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
- re_entry = re.compile(r'\s*<entry><type>(.+?)</type></entry>')
+ re_entry = re.compile(r'\s*<entry><type>(.+?)</type></entry>')
re_end = re.compile(r'\s*</table>')
f = iter(f)
@@ -654,7 +654,7 @@ if __name__ == '__main__': # pragma: no cover
if not dt:
raise ValueError('pseudo datatypes not found')
- dt.sort()
+ dt.sort()
return dt
def update_consts(filename, constname, content):
@@ -671,7 +671,7 @@ if __name__ == '__main__': # pragma: no cover
new_block = format_lines(constname, content)
data = data[:m.start()] + new_block + data[m.end():]
- with open(filename, 'w', newline='\n') as f:
+ with open(filename, 'w', newline='\n') as f:
f.write(data)
update_myself()
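The parse_pseudos() hunks above only show part of a scan-between-markers loop. As a rough, self-contained sketch of that pattern (the extract_between name and the driver lines are hypothetical; the three regexes are the ones visible in the hunks):

import re

def extract_between(lines, start_pat, entry_pat, end_pat):
    # Skip lines until start_pat matches, then collect entry_pat captures
    # until end_pat matches, mirroring the structure of parse_pseudos().
    re_start = re.compile(start_pat)
    re_entry = re.compile(entry_pat)
    re_end = re.compile(end_pat)
    found = []
    it = iter(lines)
    for line in it:
        if re_start.match(line):
            break
    for line in it:
        if re_end.match(line):
            break
        m = re_entry.match(line)
        if m:
            found.append(m.group(1))
    return found

sample = [
    '<table id="datatype-pseudotypes-table">',
    '  <entry><type>anyelement</type></entry>',
    '  <entry><type>void</type></entry>',
    '</table>',
]
print(extract_between(
    sample,
    r'\s*<table id="datatype-pseudotypes-table">',
    r'\s*<entry><type>(.+?)</type></entry>',
    r'\s*</table>'))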
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
index e2cfcb9622..7b1a5ed1bf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
@@ -4,7 +4,7 @@
Builtin list for the ScilabLexer.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -3088,6 +3088,6 @@ mclose(fd)\n''' % var_type)
with open(__file__, 'w') as f:
f.write(header)
f.write('# Autogenerated\n\n')
- for k, v in sorted(new_data.items()):
+ for k, v in sorted(new_data.items()):
f.write(format_lines(k + '_kw', v) + '\n\n')
f.write(footer)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
index 2977a2c04f..3611dfe37e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
@@ -7,7 +7,7 @@
Do not edit the FUNCTIONS list by hand.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
index f15167053a..058290d863 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
@@ -5,7 +5,7 @@
This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer``. This is for Stan language version 2.17.0.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py
index 0fabe36863..a31f1afbf3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py
@@ -4,7 +4,7 @@
Builtins for Stata
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -363,95 +363,95 @@ builtins_base = (
"ztsum_5", "zttoct_5", "ztvary_5", "ztweib_5"
)
-
-
+
+
builtins_functions = (
- "abbrev", "abs", "acos", "acosh", "asin", "asinh", "atan",
- "atan2", "atanh", "autocode", "betaden", "binomial",
- "binomialp", "binomialtail", "binormal", "bofd",
- "byteorder", "c", "_caller", "cauchy", "cauchyden",
- "cauchytail", "Cdhms", "ceil", "char", "chi2", "chi2den",
- "chi2tail", "Chms", "chop", "cholesky", "clip", "Clock",
- "clock", "cloglog", "Cmdyhms", "Cofc", "cofC", "Cofd", "cofd",
- "coleqnumb", "collatorlocale", "collatorversion",
- "colnfreeparms", "colnumb", "colsof", "comb", "cond", "corr",
- "cos", "cosh", "daily", "date", "day", "det", "dgammapda",
- "dgammapdada", "dgammapdadx", "dgammapdx", "dgammapdxdx",
- "dhms", "diag", "diag0cnt", "digamma", "dofb", "dofC", "dofc",
- "dofh", "dofm", "dofq", "dofw", "dofy", "dow", "doy",
- "dunnettprob", "e", "el", "esample", "epsdouble", "epsfloat",
- "exp", "expm1", "exponential", "exponentialden",
- "exponentialtail", "F", "Fden", "fileexists", "fileread",
- "filereaderror", "filewrite", "float", "floor", "fmtwidth",
- "frval", "_frval", "Ftail", "gammaden", "gammap", "gammaptail",
- "get", "hadamard", "halfyear", "halfyearly", "has_eprop", "hh",
- "hhC", "hms", "hofd", "hours", "hypergeometric",
- "hypergeometricp", "I", "ibeta", "ibetatail", "igaussian",
- "igaussianden", "igaussiantail", "indexnot", "inlist",
- "inrange", "int", "inv", "invbinomial", "invbinomialtail",
- "invcauchy", "invcauchytail", "invchi2", "invchi2tail",
- "invcloglog", "invdunnettprob", "invexponential",
- "invexponentialtail", "invF", "invFtail", "invgammap",
- "invgammaptail", "invibeta", "invibetatail", "invigaussian",
- "invigaussiantail", "invlaplace", "invlaplacetail",
- "invlogisticp", "invlogisticsp", "invlogisticmsp",
- "invlogistictailp", "invlogistictailsp", "invlogistictailmsp",
- "invlogit", "invnbinomial", "invnbinomialtail", "invnchi2",
- "invnchi2tail", "invnF", "invnFtail", "invnibeta",
- "invnormal", "invnt", "invnttail", "invpoisson",
- "invpoissontail", "invsym", "invt", "invttail", "invtukeyprob",
- "invweibullabp", "invweibullabgp", "invweibullphabp",
- "invweibullphabgp", "invweibullphtailabp",
- "invweibullphtailabgp", "invweibulltailabp",
- "invweibulltailabgp", "irecode", "issymmetric", "J", "laplace",
- "laplaceden", "laplacetail", "ln", "ln1m", "ln1p", "lncauchyden",
- "lnfactorial", "lngamma", "lnigammaden", "lnigaussianden",
- "lniwishartden", "lnlaplaceden", "lnmvnormalden", "lnnormal",
- "lnnormalden", "lnnormaldenxs", "lnnormaldenxms", "lnwishartden",
- "log", "log10", "log1m", "log1p", "logisticx", "logisticsx",
- "logisticmsx", "logisticdenx", "logisticdensx", "logisticdenmsx",
- "logistictailx", "logistictailsx", "logistictailmsx", "logit",
- "matmissing", "matrix", "matuniform", "max", "maxbyte",
- "maxdouble", "maxfloat", "maxint", "maxlong", "mdy", "mdyhms",
- "mi", "min", "minbyte", "mindouble", "minfloat", "minint",
- "minlong", "minutes", "missing", "mm", "mmC", "mod", "mofd",
- "month", "monthly", "mreldif", "msofhours", "msofminutes",
- "msofseconds", "nbetaden", "nbinomial", "nbinomialp",
- "nbinomialtail", "nchi2", "nchi2den", "nchi2tail", "nF",
- "nFden", "nFtail", "nibeta", "normal", "normalden",
- "normaldenxs", "normaldenxms", "npnchi2", "npnF", "npnt",
- "nt", "ntden", "nttail", "nullmat", "plural", "plurals1",
- "poisson", "poissonp", "poissontail", "qofd", "quarter",
- "quarterly", "r", "rbeta", "rbinomial", "rcauchy", "rchi2",
- "recode", "real", "regexm", "regexr", "regexs", "reldif",
- "replay", "return", "rexponential", "rgamma", "rhypergeometric",
- "rigaussian", "rlaplace", "rlogistic", "rlogistics",
- "rlogisticms", "rnbinomial", "rnormal", "rnormalm", "rnormalms",
- "round", "roweqnumb", "rownfreeparms", "rownumb", "rowsof",
- "rpoisson", "rt", "runiform", "runiformab", "runiformint",
- "rweibullab", "rweibullabg", "rweibullphab", "rweibullphabg",
- "s", "scalar", "seconds", "sign", "sin", "sinh",
- "smallestdouble", "soundex", "soundex_nara", "sqrt", "ss",
- "ssC", "strcat", "strdup", "string", "stringns", "stritrim",
- "strlen", "strlower", "strltrim", "strmatch", "strofreal",
- "strofrealns", "strpos", "strproper", "strreverse", "strrpos",
- "strrtrim", "strtoname", "strtrim", "strupper", "subinstr",
- "subinword", "substr", "sum", "sweep", "t", "tan", "tanh",
- "tC", "tc", "td", "tden", "th", "tin", "tm", "tobytes", "tq",
- "trace", "trigamma", "trunc", "ttail", "tukeyprob", "tw",
- "twithin", "uchar", "udstrlen", "udsubstr", "uisdigit",
- "uisletter", "ustrcompare", "ustrfix", "ustrfrom",
- "ustrinvalidcnt", "ustrleft", "ustrlen", "ustrlower",
- "ustrltrim", "ustrnormalize", "ustrpos", "ustrregexm",
- "ustrregexra", "ustrregexrf", "ustrregexs", "ustrreverse",
- "ustrright", "ustrrpos", "ustrrtrim", "ustrsortkey",
- "ustrtitle", "ustrto", "ustrtohex", "ustrtoname",
- "ustrtrim", "ustrunescape", "ustrupper", "ustrword",
- "ustrwordcount", "usubinstr", "usubstr", "vec", "vecdiag",
- "week", "weekly", "weibullabx", "weibullabgx", "weibulldenabx",
- "weibulldenabgx", "weibullphabx", "weibullphabgx",
- "weibullphdenabx", "weibullphdenabgx", "weibullphtailabx",
- "weibullphtailabgx", "weibulltailabx", "weibulltailabgx",
- "wofd", "word", "wordbreaklocale", "wordcount",
- "year", "yearly", "yh", "ym", "yofd", "yq", "yw"
+ "abbrev", "abs", "acos", "acosh", "asin", "asinh", "atan",
+ "atan2", "atanh", "autocode", "betaden", "binomial",
+ "binomialp", "binomialtail", "binormal", "bofd",
+ "byteorder", "c", "_caller", "cauchy", "cauchyden",
+ "cauchytail", "Cdhms", "ceil", "char", "chi2", "chi2den",
+ "chi2tail", "Chms", "chop", "cholesky", "clip", "Clock",
+ "clock", "cloglog", "Cmdyhms", "Cofc", "cofC", "Cofd", "cofd",
+ "coleqnumb", "collatorlocale", "collatorversion",
+ "colnfreeparms", "colnumb", "colsof", "comb", "cond", "corr",
+ "cos", "cosh", "daily", "date", "day", "det", "dgammapda",
+ "dgammapdada", "dgammapdadx", "dgammapdx", "dgammapdxdx",
+ "dhms", "diag", "diag0cnt", "digamma", "dofb", "dofC", "dofc",
+ "dofh", "dofm", "dofq", "dofw", "dofy", "dow", "doy",
+ "dunnettprob", "e", "el", "esample", "epsdouble", "epsfloat",
+ "exp", "expm1", "exponential", "exponentialden",
+ "exponentialtail", "F", "Fden", "fileexists", "fileread",
+ "filereaderror", "filewrite", "float", "floor", "fmtwidth",
+ "frval", "_frval", "Ftail", "gammaden", "gammap", "gammaptail",
+ "get", "hadamard", "halfyear", "halfyearly", "has_eprop", "hh",
+ "hhC", "hms", "hofd", "hours", "hypergeometric",
+ "hypergeometricp", "I", "ibeta", "ibetatail", "igaussian",
+ "igaussianden", "igaussiantail", "indexnot", "inlist",
+ "inrange", "int", "inv", "invbinomial", "invbinomialtail",
+ "invcauchy", "invcauchytail", "invchi2", "invchi2tail",
+ "invcloglog", "invdunnettprob", "invexponential",
+ "invexponentialtail", "invF", "invFtail", "invgammap",
+ "invgammaptail", "invibeta", "invibetatail", "invigaussian",
+ "invigaussiantail", "invlaplace", "invlaplacetail",
+ "invlogisticp", "invlogisticsp", "invlogisticmsp",
+ "invlogistictailp", "invlogistictailsp", "invlogistictailmsp",
+ "invlogit", "invnbinomial", "invnbinomialtail", "invnchi2",
+ "invnchi2tail", "invnF", "invnFtail", "invnibeta",
+ "invnormal", "invnt", "invnttail", "invpoisson",
+ "invpoissontail", "invsym", "invt", "invttail", "invtukeyprob",
+ "invweibullabp", "invweibullabgp", "invweibullphabp",
+ "invweibullphabgp", "invweibullphtailabp",
+ "invweibullphtailabgp", "invweibulltailabp",
+ "invweibulltailabgp", "irecode", "issymmetric", "J", "laplace",
+ "laplaceden", "laplacetail", "ln", "ln1m", "ln1p", "lncauchyden",
+ "lnfactorial", "lngamma", "lnigammaden", "lnigaussianden",
+ "lniwishartden", "lnlaplaceden", "lnmvnormalden", "lnnormal",
+ "lnnormalden", "lnnormaldenxs", "lnnormaldenxms", "lnwishartden",
+ "log", "log10", "log1m", "log1p", "logisticx", "logisticsx",
+ "logisticmsx", "logisticdenx", "logisticdensx", "logisticdenmsx",
+ "logistictailx", "logistictailsx", "logistictailmsx", "logit",
+ "matmissing", "matrix", "matuniform", "max", "maxbyte",
+ "maxdouble", "maxfloat", "maxint", "maxlong", "mdy", "mdyhms",
+ "mi", "min", "minbyte", "mindouble", "minfloat", "minint",
+ "minlong", "minutes", "missing", "mm", "mmC", "mod", "mofd",
+ "month", "monthly", "mreldif", "msofhours", "msofminutes",
+ "msofseconds", "nbetaden", "nbinomial", "nbinomialp",
+ "nbinomialtail", "nchi2", "nchi2den", "nchi2tail", "nF",
+ "nFden", "nFtail", "nibeta", "normal", "normalden",
+ "normaldenxs", "normaldenxms", "npnchi2", "npnF", "npnt",
+ "nt", "ntden", "nttail", "nullmat", "plural", "plurals1",
+ "poisson", "poissonp", "poissontail", "qofd", "quarter",
+ "quarterly", "r", "rbeta", "rbinomial", "rcauchy", "rchi2",
+ "recode", "real", "regexm", "regexr", "regexs", "reldif",
+ "replay", "return", "rexponential", "rgamma", "rhypergeometric",
+ "rigaussian", "rlaplace", "rlogistic", "rlogistics",
+ "rlogisticms", "rnbinomial", "rnormal", "rnormalm", "rnormalms",
+ "round", "roweqnumb", "rownfreeparms", "rownumb", "rowsof",
+ "rpoisson", "rt", "runiform", "runiformab", "runiformint",
+ "rweibullab", "rweibullabg", "rweibullphab", "rweibullphabg",
+ "s", "scalar", "seconds", "sign", "sin", "sinh",
+ "smallestdouble", "soundex", "soundex_nara", "sqrt", "ss",
+ "ssC", "strcat", "strdup", "string", "stringns", "stritrim",
+ "strlen", "strlower", "strltrim", "strmatch", "strofreal",
+ "strofrealns", "strpos", "strproper", "strreverse", "strrpos",
+ "strrtrim", "strtoname", "strtrim", "strupper", "subinstr",
+ "subinword", "substr", "sum", "sweep", "t", "tan", "tanh",
+ "tC", "tc", "td", "tden", "th", "tin", "tm", "tobytes", "tq",
+ "trace", "trigamma", "trunc", "ttail", "tukeyprob", "tw",
+ "twithin", "uchar", "udstrlen", "udsubstr", "uisdigit",
+ "uisletter", "ustrcompare", "ustrfix", "ustrfrom",
+ "ustrinvalidcnt", "ustrleft", "ustrlen", "ustrlower",
+ "ustrltrim", "ustrnormalize", "ustrpos", "ustrregexm",
+ "ustrregexra", "ustrregexrf", "ustrregexs", "ustrreverse",
+ "ustrright", "ustrrpos", "ustrrtrim", "ustrsortkey",
+ "ustrtitle", "ustrto", "ustrtohex", "ustrtoname",
+ "ustrtrim", "ustrunescape", "ustrupper", "ustrword",
+ "ustrwordcount", "usubinstr", "usubstr", "vec", "vecdiag",
+ "week", "weekly", "weibullabx", "weibullabgx", "weibulldenabx",
+ "weibulldenabgx", "weibullphabx", "weibullphabgx",
+ "weibullphdenabx", "weibullphdenabgx", "weibullphtailabx",
+ "weibullphtailabgx", "weibulltailabx", "weibulltailabgx",
+ "wofd", "word", "wordbreaklocale", "wordcount",
+ "year", "yearly", "yh", "ym", "yofd", "yq", "yw"
)
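builtins_base and builtins_functions above are plain data tuples; the Stata lexer turns them into token rules at class-definition time. A minimal sketch of that pattern with pygments.lexer.words (the lexer below is a stand-in for illustration, not StataLexer itself):

    from pygments.lexer import RegexLexer, words
    from pygments.token import Name, Text

    FUNCS = ("abs", "exp", "ln", "sqrt")   # stand-in for builtins_functions

    class MiniLexer(RegexLexer):
        name = 'Mini'
        tokens = {
            'root': [
                # words() compiles the tuple into a single alternation
                # regex; the suffix stops matches inside longer names.
                (words(FUNCS, suffix=r'\b'), Name.Function),
                (r'\s+', Text),
                (r'\S+', Text),
            ],
        }

    print(list(MiniLexer().get_tokens('sqrt x')))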
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py
index e72e5a5a87..66925ac645 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py
@@ -4,7 +4,7 @@
These are manually translated lists from https://msdn.microsoft.com.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py
index 28efd27115..e468f2c18a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py
@@ -1,112 +1,112 @@
-"""
- pygments.lexers._usd_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- A collection of known USD-related keywords, attributes, and types.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-COMMON_ATTRIBUTES = [
- "extent",
- "xformOpOrder",
-]
-
-KEYWORDS = [
- "class",
- "clips",
- "custom",
- "customData",
- "def",
- "dictionary",
- "inherits",
- "over",
- "payload",
- "references",
- "rel",
- "subLayers",
- "timeSamples",
- "uniform",
- "variantSet",
- "variantSets",
- "variants",
-]
-
-OPERATORS = [
- "add",
- "append",
- "delete",
- "prepend",
- "reorder",
-]
-
-SPECIAL_NAMES = [
- "active",
- "apiSchemas",
- "defaultPrim",
- "elementSize",
- "endTimeCode",
- "hidden",
- "instanceable",
- "interpolation",
- "kind",
- "startTimeCode",
- "upAxis",
-]
-
-TYPES = [
- "asset",
- "bool",
- "color3d",
- "color3f",
- "color3h",
- "color4d",
- "color4f",
- "color4h",
- "double",
- "double2",
- "double3",
- "double4",
- "float",
- "float2",
- "float3",
- "float4",
- "frame4d",
- "half",
- "half2",
- "half3",
- "half4",
- "int",
- "int2",
- "int3",
- "int4",
- "keyword",
- "matrix2d",
- "matrix3d",
- "matrix4d",
- "normal3d",
- "normal3f",
- "normal3h",
- "point3d",
- "point3f",
- "point3h",
- "quatd",
- "quatf",
- "quath",
- "string",
- "syn",
- "token",
- "uchar",
- "uchar2",
- "uchar3",
- "uchar4",
- "uint",
- "uint2",
- "uint3",
- "uint4",
- "usdaType",
- "vector3d",
- "vector3f",
- "vector3h",
-]
+"""
+ pygments.lexers._usd_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ A collection of known USD-related keywords, attributes, and types.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+COMMON_ATTRIBUTES = [
+ "extent",
+ "xformOpOrder",
+]
+
+KEYWORDS = [
+ "class",
+ "clips",
+ "custom",
+ "customData",
+ "def",
+ "dictionary",
+ "inherits",
+ "over",
+ "payload",
+ "references",
+ "rel",
+ "subLayers",
+ "timeSamples",
+ "uniform",
+ "variantSet",
+ "variantSets",
+ "variants",
+]
+
+OPERATORS = [
+ "add",
+ "append",
+ "delete",
+ "prepend",
+ "reorder",
+]
+
+SPECIAL_NAMES = [
+ "active",
+ "apiSchemas",
+ "defaultPrim",
+ "elementSize",
+ "endTimeCode",
+ "hidden",
+ "instanceable",
+ "interpolation",
+ "kind",
+ "startTimeCode",
+ "upAxis",
+]
+
+TYPES = [
+ "asset",
+ "bool",
+ "color3d",
+ "color3f",
+ "color3h",
+ "color4d",
+ "color4f",
+ "color4h",
+ "double",
+ "double2",
+ "double3",
+ "double4",
+ "float",
+ "float2",
+ "float3",
+ "float4",
+ "frame4d",
+ "half",
+ "half2",
+ "half3",
+ "half4",
+ "int",
+ "int2",
+ "int3",
+ "int4",
+ "keyword",
+ "matrix2d",
+ "matrix3d",
+ "matrix4d",
+ "normal3d",
+ "normal3f",
+ "normal3h",
+ "point3d",
+ "point3f",
+ "point3h",
+ "quatd",
+ "quatf",
+ "quath",
+ "string",
+ "syn",
+ "token",
+ "uchar",
+ "uchar2",
+ "uchar3",
+ "uchar4",
+ "uint",
+ "uint2",
+ "uint3",
+ "uint4",
+ "usdaType",
+ "vector3d",
+ "vector3f",
+ "vector3h",
+]
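The lists above are ordinary module-level constants, so they can be imported and inspected directly, independent of the USD lexer itself; for example, a quick check that the keyword and special-name lists do not overlap:

    from pygments.lexers._usd_builtins import KEYWORDS, SPECIAL_NAMES, TYPES

    # All three are plain Python lists; an empty intersection here means
    # a token can be classified unambiguously by list membership.
    print(sorted(set(KEYWORDS) & set(SPECIAL_NAMES)))
    print(len(TYPES))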
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py
index 7256a06f17..2a100e6a43 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py
@@ -5,7 +5,7 @@
These are manually translated lists from
http://www.indusoft.com/pdf/VBScript%20Reference.pdf.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
index 9690511304..76806d1ff4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
@@ -4,7 +4,7 @@
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/actionscript.py b/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
index 28625586bd..eb049ae155 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
@@ -4,7 +4,7 @@
Lexers for ActionScript and MXML.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class ActionScriptLexer(RegexLexer):
"""
name = 'ActionScript'
- aliases = ['actionscript', 'as']
+ aliases = ['actionscript', 'as']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
@@ -36,7 +36,7 @@ class ActionScriptLexer(RegexLexer):
(r'\s+', Whitespace),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex),
+ (r'/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex),
(r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(words((
@@ -104,16 +104,16 @@ class ActionScriptLexer(RegexLexer):
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
- def analyse_text(text):
- """This is only used to disambiguate between ActionScript and
- ActionScript3. We return 0 here; the ActionScript3 lexer will match
- AS3 variable definitions and that will hopefully suffice."""
- return 0
+ def analyse_text(text):
+ """This is only used to disambiguate between ActionScript and
+ ActionScript3. We return 0 here; the ActionScript3 lexer will match
+ AS3 variable definitions and that will hopefully suffice."""
+ return 0
class ActionScript3Lexer(RegexLexer):
"""
@@ -123,7 +123,7 @@ class ActionScript3Lexer(RegexLexer):
"""
name = 'ActionScript 3'
- aliases = ['actionscript3', 'as3']
+ aliases = ['actionscript3', 'as3']
filenames = ['*.as']
mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
'text/actionscript3']
@@ -148,7 +148,7 @@ class ActionScript3Lexer(RegexLexer):
bygroups(Keyword, Whitespace, Keyword.Type, Whitespace, Operator)),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\[^\\]|[^\\\n])*/[gisx]*', String.Regex),
+ (r'/(\\\\|\\[^\\]|[^\\\n])*/[gisx]*', String.Regex),
(r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
@@ -168,8 +168,8 @@ class ActionScript3Lexer(RegexLexer):
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
],
'funcparams': [
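The analyse_text hooks re-indented in this file feed Pygments' lexer guessing: each registered lexer scores a text sample between 0 and 1 and guess_lexer picks the highest. ActionScriptLexer deliberately returns 0, leaving the decision to ActionScript3Lexer's own heuristic. A short usage sketch (the snippet is invented for illustration; which lexer wins depends on the scores):

    from pygments.lexers import guess_lexer

    sample = 'var x:int = 1;\ntrace(x);\n'
    lexer = guess_lexer(sample)
    print(type(lexer).__name__)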
diff --git a/contrib/python/Pygments/py3/pygments/lexers/agile.py b/contrib/python/Pygments/py3/pygments/lexers/agile.py
index e309624fa9..bbfab5e8b9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/agile.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/agile.py
@@ -4,7 +4,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/algebra.py b/contrib/python/Pygments/py3/pygments/lexers/algebra.py
index 3e5c47b8dd..78b4864995 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/algebra.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/algebra.py
@@ -4,7 +4,7 @@
Lexers for computer algebra systems.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -67,26 +67,26 @@ class GAPLexer(RegexLexer):
],
}
- def analyse_text(text):
- score = 0.0
-
- # Declaration part
- if re.search(
- r"(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation" +
- r"|GlobalFunction|Synonym|SynonymAttr|Property))", text
- ):
- score += 0.7
-
- # Implementation part
- if re.search(
- r"(DeclareRepresentation|Install(GlobalFunction|Method|" +
- r"ImmediateMethod|OtherMethod)|New(Family|Type)|Objectify)", text
- ):
- score += 0.7
-
- return min(score, 1.0)
-
-
+ def analyse_text(text):
+ score = 0.0
+
+ # Declaration part
+ if re.search(
+ r"(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation" +
+ r"|GlobalFunction|Synonym|SynonymAttr|Property))", text
+ ):
+ score += 0.7
+
+ # Implementation part
+ if re.search(
+ r"(DeclareRepresentation|Install(GlobalFunction|Method|" +
+ r"ImmediateMethod|OtherMethod)|New(Family|Type)|Objectify)", text
+ ):
+ score += 0.7
+
+ return min(score, 1.0)
+
+
class MathematicaLexer(RegexLexer):
"""
Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ambient.py b/contrib/python/Pygments/py3/pygments/lexers/ambient.py
index 5f82804a03..1c09cd8f17 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ambient.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ambient.py
@@ -4,7 +4,7 @@
Lexers for AmbientTalk language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class AmbientTalkLexer(RegexLexer):
"""
name = 'AmbientTalk'
filenames = ['*.at']
- aliases = ['ambienttalk', 'ambienttalk/2', 'at']
+ aliases = ['ambienttalk', 'ambienttalk/2', 'at']
mimetypes = ['text/x-ambienttalk']
flags = re.MULTILINE | re.DOTALL
@@ -43,7 +43,7 @@ class AmbientTalkLexer(RegexLexer):
(builtin, Name.Builtin),
(r'(true|false|nil)\b', Keyword.Constant),
(r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r'\|', Punctuation, 'arglist'),
(r'<:|[*^!%&<>+=,./?-]|:=', Operator),
(r"`[a-zA-Z_]\w*", String.Symbol),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py b/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py
index 756b773135..ab82c4ddb0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py
@@ -1,53 +1,53 @@
-"""
- pygments.lexers.amdgpu
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the AMDGPU ISA assembly.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Name, Text, Keyword, Whitespace, Number, Comment
-
-import re
-
-__all__ = ['AMDGPULexer']
-
-
-class AMDGPULexer(RegexLexer):
- """
- For AMD GPU assembly.
-
- .. versionadded:: 2.8
- """
- name = 'AMDGPU'
- aliases = ['amdgpu']
- filenames = ['*.isa']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'[\r\n]+', Text),
- (r'(([a-z_0-9])*:([a-z_0-9])*)', Name.Attribute),
- (r'(\[|\]|\(|\)|,|\:|\&)', Text),
- (r'([;#]|//).*?\n', Comment.Single),
- (r'((s_)?(ds|buffer|flat|image)_[a-z0-9_]+)', Keyword.Reserved),
- (r'(_lo|_hi)', Name.Variable),
- (r'(vmcnt|lgkmcnt|expcnt)', Name.Attribute),
- (words((
- 'op', 'vaddr', 'vdata', 'soffset', 'srsrc', 'format',
- 'offset', 'offen', 'idxen', 'glc', 'dlc', 'slc', 'tfe', 'lds',
- 'lit', 'unorm'), suffix=r'\b'), Name.Attribute),
- (r'(label_[a-z0-9]+)', Keyword),
- (r'(_L[0-9]*)', Name.Variable),
- (r'(s|v)_[a-z0-9_]+', Keyword),
- (r'(v[0-9.]+|vcc|exec|v)', Name.Variable),
- (r's[0-9.]+|s', Name.Variable),
- (r'[0-9]+\.[^0-9]+', Number.Float),
- (r'(0[xX][a-z0-9]+)|([0-9]+)', Number.Integer)
- ]
- }
+"""
+ pygments.lexers.amdgpu
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the AMDGPU ISA assembly.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Name, Text, Keyword, Whitespace, Number, Comment
+
+import re
+
+__all__ = ['AMDGPULexer']
+
+
+class AMDGPULexer(RegexLexer):
+ """
+ For AMD GPU assembly.
+
+ .. versionadded:: 2.8
+ """
+ name = 'AMDGPU'
+ aliases = ['amdgpu']
+ filenames = ['*.isa']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'[\r\n]+', Text),
+ (r'(([a-z_0-9])*:([a-z_0-9])*)', Name.Attribute),
+ (r'(\[|\]|\(|\)|,|\:|\&)', Text),
+ (r'([;#]|//).*?\n', Comment.Single),
+ (r'((s_)?(ds|buffer|flat|image)_[a-z0-9_]+)', Keyword.Reserved),
+ (r'(_lo|_hi)', Name.Variable),
+ (r'(vmcnt|lgkmcnt|expcnt)', Name.Attribute),
+ (words((
+ 'op', 'vaddr', 'vdata', 'soffset', 'srsrc', 'format',
+ 'offset', 'offen', 'idxen', 'glc', 'dlc', 'slc', 'tfe', 'lds',
+ 'lit', 'unorm'), suffix=r'\b'), Name.Attribute),
+ (r'(label_[a-z0-9]+)', Keyword),
+ (r'(_L[0-9]*)', Name.Variable),
+ (r'(s|v)_[a-z0-9_]+', Keyword),
+ (r'(v[0-9.]+|vcc|exec|v)', Name.Variable),
+ (r's[0-9.]+|s', Name.Variable),
+ (r'[0-9]+\.[^0-9]+', Number.Float),
+ (r'(0[xX][a-z0-9]+)|([0-9]+)', Number.Integer)
+ ]
+ }
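The AMDGPULexer moved above is registered under the 'amdgpu' alias and plugs into the usual highlight pipeline; a quick usage sketch (the ISA snippet is invented for illustration):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.amdgpu import AMDGPULexer

    code = 's_load_dword s0, s[4:5], 0x0\nv_mov_b32 v1, 0  // copy\n'
    print(highlight(code, AMDGPULexer(), TerminalFormatter()))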
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ampl.py b/contrib/python/Pygments/py3/pygments/lexers/ampl.py
index 1d6e329020..2cff4fc410 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ampl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ampl.py
@@ -4,7 +4,7 @@
Lexers for the AMPL language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py b/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py
index 162f038ef0..07f5ada0be 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py
@@ -1,447 +1,447 @@
-"""
- pygments.lexers.apdlexer
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for ANSYS Parametric Design Language.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Comment, Keyword, Name, Text, Number, Operator, \
+"""
+ pygments.lexers.apdlexer
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for ANSYS Parametric Design Language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, Text, Number, Operator, \
String, Generic, Punctuation, Whitespace
-
-__all__ = ['apdlexer']
-
-
-class apdlexer(RegexLexer):
- """
- For APDL source code.
-
- .. versionadded:: 2.9
- """
- name = 'ANSYS parametric design language'
- aliases = ['ansys', 'apdl']
- filenames = ['*.ans']
- flags = re.IGNORECASE
-
- # list of elements
- elafunb = ("SURF152", "SURF153", "SURF154", "SURF156", "SHELL157",
- "SURF159", "LINK160", "BEAM161", "PLANE162",
- "SHELL163", "SOLID164", "COMBI165", "MASS166",
- "LINK167", "SOLID168", "TARGE169", "TARGE170",
- "CONTA171", "CONTA172", "CONTA173", "CONTA174",
- "CONTA175", "CONTA176", "CONTA177", "CONTA178",
- "PRETS179", "LINK180", "SHELL181", "PLANE182",
- "PLANE183", "MPC184", "SOLID185", "SOLID186",
- "SOLID187", "BEAM188", "BEAM189", "SOLSH190",
- "INTER192", "INTER193", "INTER194", "INTER195",
- "MESH200", "FOLLW201", "INTER202", "INTER203",
- "INTER204", "INTER205", "SHELL208", "SHELL209",
- "CPT212", "CPT213", "COMBI214", "CPT215", "CPT216",
- "CPT217", "FLUID220", "FLUID221", "PLANE223",
- "SOLID226", "SOLID227", "PLANE230", "SOLID231",
- "SOLID232", "PLANE233", "SOLID236", "SOLID237",
- "PLANE238", "SOLID239", "SOLID240", "HSFLD241",
- "HSFLD242", "SURF251", "SURF252", "REINF263",
- "REINF264", "REINF265", "SOLID272", "SOLID273",
- "SOLID278", "SOLID279", "SHELL281", "SOLID285",
- "PIPE288", "PIPE289", "ELBOW290", "USER300", "BEAM3",
- "BEAM4", "BEAM23", "BEAM24", "BEAM44", "BEAM54",
- "COMBIN7", "FLUID79", "FLUID80", "FLUID81", "FLUID141",
- "FLUID142", "INFIN9", "INFIN47", "PLANE13", "PLANE25",
- "PLANE42", "PLANE53", "PLANE67", "PLANE82", "PLANE83",
- "PLANE145", "PLANE146", "CONTAC12", "CONTAC52",
- "LINK1", "LINK8", "LINK10", "LINK32", "PIPE16",
- "PIPE17", "PIPE18", "PIPE20", "PIPE59", "PIPE60",
- "SHELL41", "SHELL43", "SHELL57", "SHELL63", "SHELL91",
- "SHELL93", "SHELL99", "SHELL150", "SOLID5", "SOLID45",
- "SOLID46", "SOLID65", "SOLID69", "SOLID92", "SOLID95",
- "SOLID117", "SOLID127", "SOLID128", "SOLID147",
- "SOLID148", "SOLID191", "VISCO88", "VISCO89",
- "VISCO106", "VISCO107", "VISCO108", "TRANS109")
-
- elafunc = ("PGRAPH", "/VT", "VTIN", "VTRFIL", "VTTEMP", "PGRSET",
- "VTCLR", "VTMETH", "VTRSLT", "VTVMOD", "PGSELE",
- "VTDISC", "VTMP", "VTSEC", "PGWRITE", "VTEVAL", "VTOP",
- "VTSFE", "POUTRES", "VTFREQ", "VTPOST", "VTSL",
- "FLDATA1-40", "HFPCSWP", "MSDATA", "MSVARY", "QFACT",
- "FLOCHECK", "HFPOWER", "MSMASS", "PERI", "SPADP",
- "FLREAD", "HFPORT", "MSMETH", "PLFSS", "SPARM",
- "FLOTRAN", "HFSCAT", "MSMIR", "PLSCH", "SPFSS",
- "HFADP", "ICE", "MSNOMF", "PLSYZ", "SPICE", "HFARRAY",
- "ICEDELE", "MSPROP", "PLTD", "SPSCAN", "HFDEEM",
- "ICELIST", "MSQUAD", "PLTLINE", "SPSWP", "HFEIGOPT",
- "ICVFRC", "MSRELAX", "PLVFRC", "HFEREFINE", "LPRT",
- "MSSOLU", "/PICE", "HFMODPRT", "MSADV", "MSSPEC",
- "PLWAVE", "HFPA", "MSCAP", "MSTERM", "PRSYZ")
-
- elafund = ("*VOPER", "VOVLAP", "*VPLOT", "VPLOT", "VPTN", "*VPUT",
- "VPUT", "*VREAD", "VROTAT", "VSBA", "VSBV", "VSBW",
- "/VSCALE", "*VSCFUN", "VSEL", "VSLA", "*VSTAT", "VSUM",
- "VSWEEP", "VSYMM", "VTRAN", "VTYPE", "/VUP", "*VWRITE",
- "/WAIT", "WAVES", "WERASE", "WFRONT", "/WINDOW",
- "WMID", "WMORE", "WPAVE", "WPCSYS", "WPLANE", "WPOFFS",
- "WPROTA", "WPSTYL", "WRFULL", "WRITE", "WRITEMAP",
- "*WRK", "WSORT", "WSPRINGS", "WSTART", "WTBCREATE",
- "XFDATA", "XFENRICH", "XFLIST", "/XFRM", "/XRANGE",
- "XVAR", "/YRANGE", "/ZOOM", "/WB", "XMLO", "/XML",
- "CNTR", "EBLOCK", "CMBLOCK", "NBLOCK", "/TRACK",
- "CWZPLOT", "~EUI", "NELE", "EALL", "NALL", "FLITEM",
- "LSLN", "PSOLVE", "ASLN", "/VERIFY", "/SSS", "~CFIN",
- "*EVAL", "*MOONEY", "/RUNSTAT", "ALPFILL",
- "ARCOLLAPSE", "ARDETACH", "ARFILL", "ARMERGE",
- "ARSPLIT", "FIPLOT", "GAPFINISH", "GAPLIST",
- "GAPMERGE", "GAPOPT", "GAPPLOT", "LNCOLLAPSE",
- "LNDETACH", "LNFILL", "LNMERGE", "LNSPLIT", "PCONV",
- "PLCONV", "PEMOPTS", "PEXCLUDE", "PINCLUDE", "PMETH",
- "/PMETH", "PMOPTS", "PPLOT", "PPRANGE", "PRCONV",
- "PRECISION", "RALL", "RFILSZ", "RITER", "RMEMRY",
- "RSPEED", "RSTAT", "RTIMST", "/RUNST", "RWFRNT",
- "SARPLOT", "SHSD", "SLPPLOT", "SLSPLOT", "VCVFILL",
- "/OPT", "OPEQN", "OPFACT", "OPFRST", "OPGRAD",
- "OPKEEP", "OPLOOP", "OPPRNT", "OPRAND", "OPSUBP",
- "OPSWEEP", "OPTYPE", "OPUSER", "OPVAR", "OPADD",
- "OPCLR", "OPDEL", "OPMAKE", "OPSEL", "OPANL", "OPDATA",
- "OPRESU", "OPSAVE", "OPEXE", "OPLFA", "OPLGR",
- "OPLIST", "OPLSW", "OPRFA", "OPRGR", "OPRSW",
- "PILECALC", "PILEDISPSET", "PILEGEN", "PILELOAD",
- "PILEMASS", "PILERUN", "PILESEL", "PILESTIF",
- "PLVAROPT", "PRVAROPT", "TOCOMP", "TODEF", "TOFREQ",
- "TOTYPE", "TOVAR", "TOEXE", "TOLOOP", "TOGRAPH",
- "TOLIST", "TOPLOT", "TOPRINT", "TOSTAT", "TZAMESH",
- "TZDELE", "TZEGEN", "XVAROPT", "PGSAVE", "SOLCONTROL",
- "TOTAL", "VTGEOM", "VTREAL", "VTSTAT")
-
- elafune = ("/ANUM", "AOFFST", "AOVLAP", "APLOT", "APPEND", "APTN",
- "ARCLEN", "ARCTRM", "AREAS", "AREFINE", "AREMESH",
- "AREVERSE", "AROTAT", "ARSCALE", "ARSYM", "ASBA",
- "ASBL", "ASBV", "ASBW", "ASCRES", "ASEL", "ASIFILE",
- "*ASK", "ASKIN", "ASLL", "ASLV", "ASOL", "/ASSIGN",
- "ASUB", "ASUM", "ATAN", "ATRAN", "ATYPE", "/AUTO",
- "AUTOTS", "/AUX2", "/AUX3", "/AUX12", "/AUX15",
- "AVPRIN", "AVRES", "AWAVE", "/AXLAB", "*AXPY",
- "/BATCH", "BCSOPTION", "BETAD", "BF", "BFA", "BFADELE",
- "BFALIST", "BFCUM", "BFDELE", "BFE", "BFECUM",
- "BFEDELE", "BFELIST", "BFESCAL", "BFINT", "BFK",
- "BFKDELE", "BFKLIST", "BFL", "BFLDELE", "BFLIST",
- "BFLLIST", "BFSCALE", "BFTRAN", "BFUNIF", "BFV",
- "BFVDELE", "BFVLIST", "BIOOPT", "BIOT", "BLC4", "BLC5",
- "BLOCK", "BOOL", "BOPTN", "BSAX", "BSMD", "BSM1",
- "BSM2", "BSPLIN", "BSS1", "BSS2", "BSTE", "BSTQ",
- "BTOL", "BUCOPT", "C", "CALC", "CAMPBELL", "CBDOF",
- "CBMD", "CBMX", "CBTE", "CBTMP", "CDOPT", "CDREAD",
- "CDWRITE", "CE", "CECHECK", "CECMOD", "CECYC",
- "CEDELE", "CEINTF", "CELIST", "CENTER", "CEQN",
- "CERIG", "CESGEN", "CFACT", "*CFCLOS", "*CFOPEN",
- "*CFWRITE", "/CFORMAT", "CGLOC", "CGOMGA", "CGROW",
- "CHECK", "CHKMSH", "CINT", "CIRCLE", "CISOL",
- "/CLABEL", "/CLEAR", "CLOCAL", "CLOG", "/CLOG",
- "CLRMSHLN", "CM", "CMACEL", "/CMAP", "CMATRIX",
- "CMDELE", "CMDOMEGA", "CMEDIT", "CMGRP", "CMLIST",
- "CMMOD", "CMOMEGA", "CMPLOT", "CMROTATE", "CMSEL",
- "CMSFILE", "CMSOPT", "CMWRITE", "CNCHECK", "CNKMOD",
- "CNTR", "CNVTOL", "/COLOR", "/COM", "*COMP", "COMBINE",
- "COMPRESS", "CON4", "CONE", "/CONFIG", "CONJUG",
- "/CONTOUR", "/COPY", "CORIOLIS", "COUPLE", "COVAL",
- "CP", "CPCYC", "CPDELE", "CPINTF", "/CPLANE", "CPLGEN",
- "CPLIST", "CPMERGE", "CPNGEN", "CPSGEN", "CQC",
- "*CREATE", "CRPLIM", "CS", "CSCIR", "CSDELE", "CSKP",
- "CSLIST", "CSWPLA", "CSYS", "/CTYPE", "CURR2D",
- "CUTCONTROL", "/CVAL", "CVAR", "/CWD", "CYCCALC",
- "/CYCEXPAND", "CYCFILES", "CYCFREQ", "*CYCLE",
- "CYCLIC", "CYCOPT", "CYCPHASE", "CYCSPEC", "CYL4",
- "CYL5", "CYLIND", "CZDEL", "CZMESH", "D", "DA",
- "DADELE", "DALIST", "DAMORPH", "DATA", "DATADEF",
- "DCGOMG", "DCUM", "DCVSWP", "DDASPEC", "DDELE",
- "DDOPTION", "DEACT", "DEFINE", "*DEL", "DELETE",
- "/DELETE", "DELTIM", "DEMORPH", "DERIV", "DESIZE",
- "DESOL", "DETAB", "/DEVDISP", "/DEVICE", "/DFLAB",
- "DFLX", "DFSWAVE", "DIG", "DIGIT", "*DIM",
- "/DIRECTORY", "DISPLAY", "/DIST", "DJ", "DJDELE",
- "DJLIST", "DK", "DKDELE", "DKLIST", "DL", "DLDELE",
- "DLIST", "DLLIST", "*DMAT", "DMOVE", "DMPEXT",
- "DMPOPTION", "DMPRAT", "DMPSTR", "DNSOL", "*DO", "DOF",
- "DOFSEL", "DOMEGA", "*DOT", "*DOWHILE", "DSCALE",
- "/DSCALE", "DSET", "DSPOPTION", "DSUM", "DSURF",
- "DSYM", "DSYS", "DTRAN", "DUMP", "/DV3D", "DVAL",
- "DVMORPH", "DYNOPT", "E", "EALIVE", "EDADAPT", "EDALE",
- "EDASMP", "EDBOUND", "EDBX", "EDBVIS", "EDCADAPT",
- "EDCGEN", "EDCLIST", "EDCMORE", "EDCNSTR", "EDCONTACT",
- "EDCPU", "EDCRB", "EDCSC", "EDCTS", "EDCURVE",
- "EDDAMP", "EDDBL", "EDDC", "EDDRELAX", "EDDUMP",
- "EDELE", "EDENERGY", "EDFPLOT", "EDGCALE", "/EDGE",
- "EDHGLS", "EDHIST", "EDHTIME", "EDINT", "EDIPART",
- "EDIS", "EDLCS", "EDLOAD", "EDMP", "EDNB", "EDNDTSD",
- "EDNROT", "EDOPT", "EDOUT", "EDPART", "EDPC", "EDPL",
- "EDPVEL", "EDRC", "EDRD", "EDREAD", "EDRI", "EDRST",
- "EDRUN", "EDSHELL", "EDSOLV", "EDSP", "EDSTART",
- "EDTERM", "EDTP", "EDVEL", "EDWELD", "EDWRITE",
- "EEXTRUDE", "/EFACET", "EGEN", "*EIGEN", "EINFIN",
- "EINTF", "EKILL", "ELBOW", "ELEM", "ELIST", "*ELSE",
- "*ELSEIF", "EMAGERR", "EMATWRITE", "EMF", "EMFT",
- "EMID", "EMIS", "EMODIF", "EMORE", "EMSYM", "EMTGEN",
- "EMUNIT", "EN", "*END", "*ENDDO", "*ENDIF",
- "ENDRELEASE", "ENERSOL", "ENGEN", "ENORM", "ENSYM",
- "EORIENT", "EPLOT", "EQSLV", "ERASE", "/ERASE",
- "EREAD", "EREFINE", "EREINF", "ERESX", "ERNORM",
- "ERRANG", "ESCHECK", "ESEL", "/ESHAPE", "ESIZE",
- "ESLA", "ESLL", "ESLN", "ESLV", "ESOL", "ESORT",
- "ESSOLV", "ESTIF", "ESURF", "ESYM", "ESYS", "ET",
- "ETABLE", "ETCHG", "ETCONTROL", "ETDELE", "ETLIST",
- "ETYPE", "EUSORT", "EWRITE", "*EXIT", "/EXIT", "EXP",
- "EXPAND", "/EXPAND", "EXPASS", "*EXPORT", "EXPROFILE",
- "EXPSOL", "EXTOPT", "EXTREM", "EXUNIT", "F", "/FACET",
- "FATIGUE", "FC", "FCCHECK", "FCDELE", "FCLIST", "FCUM",
- "FCTYP", "FDELE", "/FDELE", "FE", "FEBODY", "FECONS",
- "FEFOR", "FELIST", "FESURF", "*FFT", "FILE",
- "FILEAUX2", "FILEAUX3", "FILEDISP", "FILL", "FILLDATA",
- "/FILNAME", "FINISH", "FITEM", "FJ", "FJDELE",
- "FJLIST", "FK", "FKDELE", "FKLIST", "FL", "FLIST",
- "FLLIST", "FLST", "FLUXV", "FLUREAD", "FMAGBC",
- "FMAGSUM", "/FOCUS", "FOR2D", "FORCE", "FORM",
- "/FORMAT", "FP", "FPLIST", "*FREE", "FREQ", "FRQSCL",
- "FS", "FSCALE", "FSDELE", "FSLIST", "FSNODE", "FSPLOT",
- "FSSECT", "FSSPARM", "FSUM", "FTCALC", "FTRAN",
- "FTSIZE", "FTWRITE", "FTYPE", "FVMESH", "GAP", "GAPF",
- "GAUGE", "GCDEF", "GCGEN", "/GCMD", "/GCOLUMN",
- "GENOPT", "GEOM", "GEOMETRY", "*GET", "/GFILE",
- "/GFORMAT", "/GLINE", "/GMARKER", "GMATRIX", "GMFACE",
- "*GO", "/GO", "/GOLIST", "/GOPR", "GP", "GPDELE",
- "GPLIST", "GPLOT", "/GRAPHICS", "/GRESUME", "/GRID",
- "/GROPT", "GRP", "/GRTYP", "/GSAVE", "GSBDATA",
- "GSGDATA", "GSLIST", "GSSOL", "/GST", "GSUM", "/GTHK",
- "/GTYPE", "HARFRQ", "/HBC", "HBMAT", "/HEADER", "HELP",
- "HELPDISP", "HEMIOPT", "HFANG", "HFSYM", "HMAGSOLV",
- "HPGL", "HPTCREATE", "HPTDELETE", "HRCPLX", "HREXP",
- "HROPT", "HROCEAN", "HROUT", "IC", "ICDELE", "ICLIST",
- "/ICLWID", "/ICSCALE", "*IF", "IGESIN", "IGESOUT",
- "/IMAGE", "IMAGIN", "IMESH", "IMMED", "IMPD",
- "INISTATE", "*INIT", "/INPUT", "/INQUIRE", "INRES",
- "INRTIA", "INT1", "INTSRF", "IOPTN", "IRLF", "IRLIST",
- "*ITENGINE", "JPEG", "JSOL", "K", "KATT", "KBC",
- "KBETW", "KCALC", "KCENTER", "KCLEAR", "KDELE",
- "KDIST", "KEEP", "KESIZE", "KEYOPT", "KEYPTS", "KEYW",
- "KFILL", "KGEN", "KL", "KLIST", "KMESH", "KMODIF",
- "KMOVE", "KNODE", "KPLOT", "KPSCALE", "KREFINE",
- "KSCALE", "KSCON", "KSEL", "KSLL", "KSLN", "KSUM",
- "KSYMM", "KTRAN", "KUSE", "KWPAVE", "KWPLAN", "L",
- "L2ANG", "L2TAN", "LANG", "LARC", "/LARC", "LAREA",
- "LARGE", "LATT", "LAYER", "LAYERP26", "LAYLIST",
- "LAYPLOT", "LCABS", "LCASE", "LCCALC", "LCCAT",
- "LCDEF", "LCFACT", "LCFILE", "LCLEAR", "LCOMB",
- "LCOPER", "LCSEL", "LCSL", "LCSUM", "LCWRITE",
- "LCZERO", "LDELE", "LDIV", "LDRAG", "LDREAD", "LESIZE",
- "LEXTND", "LFILLT", "LFSURF", "LGEN", "LGLUE",
- "LGWRITE", "/LIGHT", "LINA", "LINE", "/LINE", "LINES",
- "LINL", "LINP", "LINV", "LIST", "*LIST", "LLIST",
- "LMATRIX", "LMESH", "LNSRCH", "LOCAL", "LOVLAP",
- "LPLOT", "LPTN", "LREFINE", "LREVERSE", "LROTAT",
- "LSBA", "*LSBAC", "LSBL", "LSBV", "LSBW", "LSCLEAR",
- "LSDELE", "*LSDUMP", "LSEL", "*LSENGINE", "*LSFACTOR",
- "LSLA", "LSLK", "LSOPER", "/LSPEC", "LSREAD",
- "*LSRESTORE", "LSSCALE", "LSSOLVE", "LSTR", "LSUM",
- "LSWRITE", "/LSYMBOL", "LSYMM", "LTAN", "LTRAN",
- "LUMPM", "LVSCALE", "LWPLAN", "M", "MADAPT", "MAGOPT",
- "MAGSOLV", "/MAIL", "MAP", "/MAP", "MAP2DTO3D",
- "MAPSOLVE", "MAPVAR", "MASTER", "MAT", "MATER",
- "MCHECK", "MDAMP", "MDELE", "MDPLOT", "MEMM", "/MENU",
- "MESHING", "MFANALYSIS", "MFBUCKET", "MFCALC", "MFCI",
- "MFCLEAR", "MFCMMAND", "MFCONV", "MFDTIME", "MFELEM",
- "MFEM", "MFEXTER", "MFFNAME", "MFFR", "MFIMPORT",
- "MFINTER", "MFITER", "MFLCOMM", "MFLIST", "MFMAP",
- "MFORDER", "MFOUTPUT", "*MFOURI", "MFPSIMUL", "MFRC",
- "MFRELAX", "MFRSTART", "MFSORDER", "MFSURFACE",
- "MFTIME", "MFTOL", "*MFUN", "MFVOLUME", "MFWRITE",
- "MGEN", "MIDTOL", "/MKDIR", "MLIST", "MMASS", "MMF",
- "MODCONT", "MODE", "MODIFY", "MODMSH", "MODSELOPTION",
- "MODOPT", "MONITOR", "*MOPER", "MOPT", "MORPH", "MOVE",
- "MP", "MPAMOD", "MPCHG", "MPCOPY", "MPDATA", "MPDELE",
- "MPDRES", "/MPLIB", "MPLIST", "MPPLOT", "MPREAD",
- "MPRINT", "MPTEMP", "MPTGEN", "MPTRES", "MPWRITE",
- "/MREP", "MSAVE", "*MSG", "MSHAPE", "MSHCOPY",
- "MSHKEY", "MSHMID", "MSHPATTERN", "MSOLVE", "/MSTART",
- "MSTOLE", "*MULT", "*MWRITE", "MXPAND", "N", "NANG",
- "NAXIS", "NCNV", "NDELE", "NDIST", "NDSURF", "NEQIT",
- "/NERR", "NFORCE", "NGEN", "NKPT", "NLADAPTIVE",
- "NLDIAG", "NLDPOST", "NLGEOM", "NLHIST", "NLIST",
- "NLMESH", "NLOG", "NLOPT", "NMODIF", "NOCOLOR",
- "NODES", "/NOERASE", "/NOLIST", "NOOFFSET", "NOORDER",
- "/NOPR", "NORA", "NORL", "/NORMAL", "NPLOT", "NPRINT",
- "NREAD", "NREFINE", "NRLSUM", "*NRM", "NROPT",
- "NROTAT", "NRRANG", "NSCALE", "NSEL", "NSLA", "NSLE",
- "NSLK", "NSLL", "NSLV", "NSMOOTH", "NSOL", "NSORT",
- "NSTORE", "NSUBST", "NSVR", "NSYM", "/NUMBER",
- "NUMCMP", "NUMEXP", "NUMMRG", "NUMOFF", "NUMSTR",
- "NUMVAR", "NUSORT", "NWPAVE", "NWPLAN", "NWRITE",
- "OCDATA", "OCDELETE", "OCLIST", "OCREAD", "OCTABLE",
- "OCTYPE", "OCZONE", "OMEGA", "OPERATE", "OPNCONTROL",
- "OUTAERO", "OUTOPT", "OUTPR", "/OUTPUT", "OUTRES",
- "OVCHECK", "PADELE", "/PAGE", "PAGET", "PAPUT",
- "PARESU", "PARTSEL", "PARRES", "PARSAV", "PASAVE",
- "PATH", "PAUSE", "/PBC", "/PBF", "PCALC", "PCGOPT",
- "PCIRC", "/PCIRCLE", "/PCOPY", "PCROSS", "PDANL",
- "PDCDF", "PDCFLD", "PDCLR", "PDCMAT", "PDCORR",
- "PDDMCS", "PDDOEL", "PDEF", "PDEXE", "PDHIST",
- "PDINQR", "PDLHS", "PDMETH", "PDOT", "PDPINV",
- "PDPLOT", "PDPROB", "PDRESU", "PDROPT", "/PDS",
- "PDSAVE", "PDSCAT", "PDSENS", "PDSHIS", "PDUSER",
- "PDVAR", "PDWRITE", "PERBC2D", "PERTURB", "PFACT",
- "PHYSICS", "PIVCHECK", "PLCAMP", "PLCFREQ", "PLCHIST",
- "PLCINT", "PLCPLX", "PLCRACK", "PLDISP", "PLESOL",
- "PLETAB", "PLFAR", "PLF2D", "PLGEOM", "PLLS", "PLMAP",
- "PLMC", "PLNEAR", "PLNSOL", "/PLOPTS", "PLORB", "PLOT",
- "PLOTTING", "PLPAGM", "PLPATH", "PLSECT", "PLST",
- "PLTIME", "PLTRAC", "PLVAR", "PLVECT", "PLZZ",
- "/PMACRO", "PMAP", "PMGTRAN", "PMLOPT", "PMLSIZE",
- "/PMORE", "PNGR", "/PNUM", "POINT", "POLY", "/POLYGON",
- "/POST1", "/POST26", "POWERH", "PPATH", "PRANGE",
- "PRAS", "PRCAMP", "PRCINT", "PRCPLX", "PRED",
- "PRENERGY", "/PREP7", "PRERR", "PRESOL", "PRETAB",
- "PRFAR", "PRI2", "PRIM", "PRINT", "*PRINT", "PRISM",
- "PRITER", "PRJSOL", "PRNEAR", "PRNLD", "PRNSOL",
- "PROD", "PRORB", "PRPATH", "PRRFOR", "PRRSOL",
- "PRSCONTROL", "PRSECT", "PRTIME", "PRVAR", "PRVECT",
- "PSCONTROL", "PSCR", "PSDCOM", "PSDFRQ", "PSDGRAPH",
- "PSDRES", "PSDSPL", "PSDUNIT", "PSDVAL", "PSDWAV",
- "/PSEARCH", "PSEL", "/PSF", "PSMAT", "PSMESH",
- "/PSPEC", "/PSTATUS", "PSTRES", "/PSYMB", "PTR",
- "PTXY", "PVECT", "/PWEDGE", "QDVAL", "QRDOPT", "QSOPT",
- "QUAD", "/QUIT", "QUOT", "R", "RACE", "RADOPT",
- "RAPPND", "RATE", "/RATIO", "RBE3", "RCON", "RCYC",
- "RDEC", "RDELE", "READ", "REAL", "REALVAR", "RECTNG",
- "REMESH", "/RENAME", "REORDER", "*REPEAT", "/REPLOT",
- "RESCOMBINE", "RESCONTROL", "RESET", "/RESET", "RESP",
- "RESUME", "RESVEC", "RESWRITE", "*RETURN", "REXPORT",
- "REZONE", "RFORCE", "/RGB", "RIGID", "RIGRESP",
- "RIMPORT", "RLIST", "RMALIST", "RMANL", "RMASTER",
- "RMCAP", "RMCLIST", "/RMDIR", "RMFLVEC", "RMLVSCALE",
- "RMMLIST", "RMMRANGE", "RMMSELECT", "RMNDISP",
- "RMNEVEC", "RMODIF", "RMORE", "RMPORDER", "RMRESUME",
- "RMRGENERATE", "RMROPTIONS", "RMRPLOT", "RMRSTATUS",
- "RMSAVE", "RMSMPLE", "RMUSE", "RMXPORT", "ROCK",
- "ROSE", "RPOLY", "RPR4", "RPRISM", "RPSD", "RSFIT",
- "RSOPT", "RSPLIT", "RSPLOT", "RSPRNT", "RSSIMS",
- "RSTMAC", "RSTOFF", "RSURF", "RSYMM", "RSYS", "RTHICK",
- "SABS", "SADD", "SALLOW", "SAVE", "SBCLIST", "SBCTRAN",
- "SDELETE", "SE", "SECCONTROL", "SECDATA",
- "SECFUNCTION", "SECJOINT", "/SECLIB", "SECLOCK",
- "SECMODIF", "SECNUM", "SECOFFSET", "SECPLOT",
- "SECREAD", "SECSTOP", "SECTYPE", "SECWRITE", "SED",
- "SEDLIST", "SEEXP", "/SEG", "SEGEN", "SELIST", "SELM",
- "SELTOL", "SENERGY", "SEOPT", "SESYMM", "*SET", "SET",
- "SETFGAP", "SETRAN", "SEXP", "SF", "SFA", "SFACT",
- "SFADELE", "SFALIST", "SFBEAM", "SFCALC", "SFCUM",
- "SFDELE", "SFE", "SFEDELE", "SFELIST", "SFFUN",
- "SFGRAD", "SFL", "SFLDELE", "SFLEX", "SFLIST",
- "SFLLIST", "SFSCALE", "SFTRAN", "/SHADE", "SHELL",
- "/SHOW", "/SHOWDISP", "SHPP", "/SHRINK", "SLIST",
- "SLOAD", "SMALL", "*SMAT", "SMAX", "/SMBC", "SMBODY",
- "SMCONS", "SMFOR", "SMIN", "SMOOTH", "SMRTSIZE",
- "SMSURF", "SMULT", "SNOPTION", "SOLU", "/SOLU",
- "SOLUOPT", "SOLVE", "SORT", "SOURCE", "SPACE",
- "SPCNOD", "SPCTEMP", "SPDAMP", "SPEC", "SPFREQ",
- "SPGRAPH", "SPH4", "SPH5", "SPHERE", "SPLINE", "SPLOT",
- "SPMWRITE", "SPOINT", "SPOPT", "SPREAD", "SPTOPT",
- "SPOWER", "SPUNIT", "SPVAL", "SQRT", "*SREAD", "SRSS",
- "SSBT", "/SSCALE", "SSLN", "SSMT", "SSPA", "SSPB",
- "SSPD", "SSPE", "SSPM", "SSUM", "SSTATE", "STABILIZE",
- "STAOPT", "STAT", "*STATUS", "/STATUS", "STEF",
- "/STITLE", "STORE", "SUBOPT", "SUBSET", "SUCALC",
- "SUCR", "SUDEL", "SUEVAL", "SUGET", "SUMAP", "SUMTYPE",
- "SUPL", "SUPR", "SURESU", "SUSAVE", "SUSEL", "SUVECT",
- "SV", "SVPLOT", "SVTYP", "SWADD", "SWDEL", "SWGEN",
- "SWLIST", "SYNCHRO", "/SYP", "/SYS", "TALLOW",
- "TARGET", "*TAXIS", "TB", "TBCOPY", "TBDATA", "TBDELE",
- "TBEO", "TBIN", "TBFIELD", "TBFT", "TBLE", "TBLIST",
- "TBMODIF", "TBPLOT", "TBPT", "TBTEMP", "TCHG", "/TEE",
- "TERM", "THEXPAND", "THOPT", "TIFF", "TIME",
- "TIMERANGE", "TIMINT", "TIMP", "TINTP", "/TITLE",
- "/TLABEL", "TOFFST", "*TOPER", "TORQ2D", "TORQC2D",
- "TORQSUM", "TORUS", "TRANS", "TRANSFER", "*TREAD",
- "TREF", "/TRIAD", "/TRLCY", "TRNOPT", "TRPDEL",
- "TRPLIS", "TRPOIN", "TRTIME", "TSHAP", "/TSPEC",
- "TSRES", "TUNIF", "TVAR", "/TXTRE", "/TYPE", "TYPE",
- "/UCMD", "/UDOC", "/UI", "UIMP", "/UIS", "*ULIB",
- "UNDELETE", "UNDO", "/UNITS", "UNPAUSE", "UPCOORD",
- "UPGEOM", "*USE", "/USER", "USRCAL", "USRDOF",
- "USRELEM", "V", "V2DOPT", "VA", "*VABS", "VADD",
- "VARDEL", "VARNAM", "VATT", "VCLEAR", "*VCOL",
- "/VCONE", "VCROSS", "*VCUM", "VDDAM", "VDELE", "VDGL",
- "VDOT", "VDRAG", "*VEC", "*VEDIT", "VEORIENT", "VEXT",
- "*VFACT", "*VFILL", "VFOPT", "VFQUERY", "VFSM",
- "*VFUN", "VGEN", "*VGET", "VGET", "VGLUE", "/VIEW",
- "VIMP", "VINP", "VINV", "*VITRP", "*VLEN", "VLIST",
- "VLSCALE", "*VMASK", "VMESH", "VOFFST", "VOLUMES")
-
- # list of in-built () functions
- elafunf = ("NX()", "NY()", "NZ()", "KX()", "KY()", "KZ()", "LX()",
- "LY()", "LZ()", "LSX()", "LSY()", "LSZ()", "NODE()",
- "KP()", "DISTND()", "DISTKP()", "DISTEN()", "ANGLEN()",
- "ANGLEK()", "NNEAR()", "KNEAR()", "ENEARN()",
- "AREAND()", "AREAKP()", "ARNODE()", "NORMNX()",
- "NORMNY()", "NORMNZ()", "NORMKX()", "NORMKY()",
- "NORMKZ()", "ENEXTN()", "NELEM()", "NODEDOF()",
- "ELADJ()", "NDFACE()", "NMFACE()", "ARFACE()", "UX()",
- "UY()", "UZ()", "ROTX()", "ROTY()", "ROTZ()", "TEMP()",
- "PRES()", "VX()", "VY()", "VZ()", "ENKE()", "ENDS()",
- "VOLT()", "MAG()", "AX()", "AY()", "AZ()",
- "VIRTINQR()", "KWGET()", "VALCHR()", "VALHEX()",
- "CHRHEX()", "STRFILL()", "STRCOMP()", "STRPOS()",
- "STRLENG()", "UPCASE()", "LWCASE()", "JOIN()",
- "SPLIT()", "ABS()", "SIGN()", "CXABS()", "EXP()",
- "LOG()", "LOG10()", "SQRT()", "NINT()", "MOD()",
- "RAND()", "GDIS()", "SIN()", "COS()", "TAN()",
- "SINH()", "COSH()", "TANH()", "ASIN()", "ACOS()",
- "ATAN()", "ATAN2()")
-
- elafung = ("NSEL()", "ESEL()", "KSEL()", "LSEL()", "ASEL()",
- "VSEL()", "NDNEXT()", "ELNEXT()", "KPNEXT()",
- "LSNEXT()", "ARNEXT()", "VLNEXT()", "CENTRX()",
- "CENTRY()", "CENTRZ()")
-
- elafunh = ("~CAT5IN", "~CATIAIN", "~PARAIN", "~PROEIN", "~SATIN",
- "~UGIN", "A", "AADD", "AATT", "ABEXTRACT", "*ABBR",
- "ABBRES", "ABBSAV", "ABS", "ACCAT", "ACCOPTION",
- "ACEL", "ACLEAR", "ADAMS", "ADAPT", "ADD", "ADDAM",
- "ADELE", "ADGL", "ADRAG", "AESIZE", "AFILLT", "AFLIST",
- "AFSURF", "*AFUN", "AGEN", "AGLUE", "AINA", "AINP",
- "AINV", "AL", "ALIST", "ALLSEL", "ALPHAD", "AMAP",
- "AMESH", "/AN3D", "ANCNTR", "ANCUT", "ANCYC", "ANDATA",
- "ANDSCL", "ANDYNA", "/ANFILE", "ANFLOW", "/ANGLE",
- "ANHARM", "ANIM", "ANISOS", "ANMODE", "ANMRES",
- "/ANNOT", "ANORM", "ANPRES", "ANSOL", "ANSTOAQWA",
- "ANSTOASAS", "ANTIME", "ANTYPE")
-
- tokens = {
- 'root': [
- (r'!.*\n', Comment),
- include('strings'),
- include('core'),
- include('nums'),
- (words((elafunb+elafunc+elafund+elafune+elafunh), suffix=r'\b'), Keyword),
- (words((elafunf+elafung), suffix=r'\b'), Name.Builtin),
- (r'AR[0-9]+', Name.Variable.Instance),
- (r'[a-z][a-z0-9_]*', Name.Variable),
+
+__all__ = ['apdlexer']
+
+
+class apdlexer(RegexLexer):
+ """
+ For APDL source code.
+
+ .. versionadded:: 2.9
+ """
+ name = 'ANSYS parametric design language'
+ aliases = ['ansys', 'apdl']
+ filenames = ['*.ans']
+ flags = re.IGNORECASE
+
+ # list of elements
+ elafunb = ("SURF152", "SURF153", "SURF154", "SURF156", "SHELL157",
+ "SURF159", "LINK160", "BEAM161", "PLANE162",
+ "SHELL163", "SOLID164", "COMBI165", "MASS166",
+ "LINK167", "SOLID168", "TARGE169", "TARGE170",
+ "CONTA171", "CONTA172", "CONTA173", "CONTA174",
+ "CONTA175", "CONTA176", "CONTA177", "CONTA178",
+ "PRETS179", "LINK180", "SHELL181", "PLANE182",
+ "PLANE183", "MPC184", "SOLID185", "SOLID186",
+ "SOLID187", "BEAM188", "BEAM189", "SOLSH190",
+ "INTER192", "INTER193", "INTER194", "INTER195",
+ "MESH200", "FOLLW201", "INTER202", "INTER203",
+ "INTER204", "INTER205", "SHELL208", "SHELL209",
+ "CPT212", "CPT213", "COMBI214", "CPT215", "CPT216",
+ "CPT217", "FLUID220", "FLUID221", "PLANE223",
+ "SOLID226", "SOLID227", "PLANE230", "SOLID231",
+ "SOLID232", "PLANE233", "SOLID236", "SOLID237",
+ "PLANE238", "SOLID239", "SOLID240", "HSFLD241",
+ "HSFLD242", "SURF251", "SURF252", "REINF263",
+ "REINF264", "REINF265", "SOLID272", "SOLID273",
+ "SOLID278", "SOLID279", "SHELL281", "SOLID285",
+ "PIPE288", "PIPE289", "ELBOW290", "USER300", "BEAM3",
+ "BEAM4", "BEAM23", "BEAM24", "BEAM44", "BEAM54",
+ "COMBIN7", "FLUID79", "FLUID80", "FLUID81", "FLUID141",
+ "FLUID142", "INFIN9", "INFIN47", "PLANE13", "PLANE25",
+ "PLANE42", "PLANE53", "PLANE67", "PLANE82", "PLANE83",
+ "PLANE145", "PLANE146", "CONTAC12", "CONTAC52",
+ "LINK1", "LINK8", "LINK10", "LINK32", "PIPE16",
+ "PIPE17", "PIPE18", "PIPE20", "PIPE59", "PIPE60",
+ "SHELL41", "SHELL43", "SHELL57", "SHELL63", "SHELL91",
+ "SHELL93", "SHELL99", "SHELL150", "SOLID5", "SOLID45",
+ "SOLID46", "SOLID65", "SOLID69", "SOLID92", "SOLID95",
+ "SOLID117", "SOLID127", "SOLID128", "SOLID147",
+ "SOLID148", "SOLID191", "VISCO88", "VISCO89",
+ "VISCO106", "VISCO107", "VISCO108", "TRANS109")
+
+ elafunc = ("PGRAPH", "/VT", "VTIN", "VTRFIL", "VTTEMP", "PGRSET",
+ "VTCLR", "VTMETH", "VTRSLT", "VTVMOD", "PGSELE",
+ "VTDISC", "VTMP", "VTSEC", "PGWRITE", "VTEVAL", "VTOP",
+ "VTSFE", "POUTRES", "VTFREQ", "VTPOST", "VTSL",
+ "FLDATA1-40", "HFPCSWP", "MSDATA", "MSVARY", "QFACT",
+ "FLOCHECK", "HFPOWER", "MSMASS", "PERI", "SPADP",
+ "FLREAD", "HFPORT", "MSMETH", "PLFSS", "SPARM",
+ "FLOTRAN", "HFSCAT", "MSMIR", "PLSCH", "SPFSS",
+ "HFADP", "ICE", "MSNOMF", "PLSYZ", "SPICE", "HFARRAY",
+ "ICEDELE", "MSPROP", "PLTD", "SPSCAN", "HFDEEM",
+ "ICELIST", "MSQUAD", "PLTLINE", "SPSWP", "HFEIGOPT",
+ "ICVFRC", "MSRELAX", "PLVFRC", "HFEREFINE", "LPRT",
+ "MSSOLU", "/PICE", "HFMODPRT", "MSADV", "MSSPEC",
+ "PLWAVE", "HFPA", "MSCAP", "MSTERM", "PRSYZ")
+
+ elafund = ("*VOPER", "VOVLAP", "*VPLOT", "VPLOT", "VPTN", "*VPUT",
+ "VPUT", "*VREAD", "VROTAT", "VSBA", "VSBV", "VSBW",
+ "/VSCALE", "*VSCFUN", "VSEL", "VSLA", "*VSTAT", "VSUM",
+ "VSWEEP", "VSYMM", "VTRAN", "VTYPE", "/VUP", "*VWRITE",
+ "/WAIT", "WAVES", "WERASE", "WFRONT", "/WINDOW",
+ "WMID", "WMORE", "WPAVE", "WPCSYS", "WPLANE", "WPOFFS",
+ "WPROTA", "WPSTYL", "WRFULL", "WRITE", "WRITEMAP",
+ "*WRK", "WSORT", "WSPRINGS", "WSTART", "WTBCREATE",
+ "XFDATA", "XFENRICH", "XFLIST", "/XFRM", "/XRANGE",
+ "XVAR", "/YRANGE", "/ZOOM", "/WB", "XMLO", "/XML",
+ "CNTR", "EBLOCK", "CMBLOCK", "NBLOCK", "/TRACK",
+ "CWZPLOT", "~EUI", "NELE", "EALL", "NALL", "FLITEM",
+ "LSLN", "PSOLVE", "ASLN", "/VERIFY", "/SSS", "~CFIN",
+ "*EVAL", "*MOONEY", "/RUNSTAT", "ALPFILL",
+ "ARCOLLAPSE", "ARDETACH", "ARFILL", "ARMERGE",
+ "ARSPLIT", "FIPLOT", "GAPFINISH", "GAPLIST",
+ "GAPMERGE", "GAPOPT", "GAPPLOT", "LNCOLLAPSE",
+ "LNDETACH", "LNFILL", "LNMERGE", "LNSPLIT", "PCONV",
+ "PLCONV", "PEMOPTS", "PEXCLUDE", "PINCLUDE", "PMETH",
+ "/PMETH", "PMOPTS", "PPLOT", "PPRANGE", "PRCONV",
+ "PRECISION", "RALL", "RFILSZ", "RITER", "RMEMRY",
+ "RSPEED", "RSTAT", "RTIMST", "/RUNST", "RWFRNT",
+ "SARPLOT", "SHSD", "SLPPLOT", "SLSPLOT", "VCVFILL",
+ "/OPT", "OPEQN", "OPFACT", "OPFRST", "OPGRAD",
+ "OPKEEP", "OPLOOP", "OPPRNT", "OPRAND", "OPSUBP",
+ "OPSWEEP", "OPTYPE", "OPUSER", "OPVAR", "OPADD",
+ "OPCLR", "OPDEL", "OPMAKE", "OPSEL", "OPANL", "OPDATA",
+ "OPRESU", "OPSAVE", "OPEXE", "OPLFA", "OPLGR",
+ "OPLIST", "OPLSW", "OPRFA", "OPRGR", "OPRSW",
+ "PILECALC", "PILEDISPSET", "PILEGEN", "PILELOAD",
+ "PILEMASS", "PILERUN", "PILESEL", "PILESTIF",
+ "PLVAROPT", "PRVAROPT", "TOCOMP", "TODEF", "TOFREQ",
+ "TOTYPE", "TOVAR", "TOEXE", "TOLOOP", "TOGRAPH",
+ "TOLIST", "TOPLOT", "TOPRINT", "TOSTAT", "TZAMESH",
+ "TZDELE", "TZEGEN", "XVAROPT", "PGSAVE", "SOLCONTROL",
+ "TOTAL", "VTGEOM", "VTREAL", "VTSTAT")
+
+ elafune = ("/ANUM", "AOFFST", "AOVLAP", "APLOT", "APPEND", "APTN",
+ "ARCLEN", "ARCTRM", "AREAS", "AREFINE", "AREMESH",
+ "AREVERSE", "AROTAT", "ARSCALE", "ARSYM", "ASBA",
+ "ASBL", "ASBV", "ASBW", "ASCRES", "ASEL", "ASIFILE",
+ "*ASK", "ASKIN", "ASLL", "ASLV", "ASOL", "/ASSIGN",
+ "ASUB", "ASUM", "ATAN", "ATRAN", "ATYPE", "/AUTO",
+ "AUTOTS", "/AUX2", "/AUX3", "/AUX12", "/AUX15",
+ "AVPRIN", "AVRES", "AWAVE", "/AXLAB", "*AXPY",
+ "/BATCH", "BCSOPTION", "BETAD", "BF", "BFA", "BFADELE",
+ "BFALIST", "BFCUM", "BFDELE", "BFE", "BFECUM",
+ "BFEDELE", "BFELIST", "BFESCAL", "BFINT", "BFK",
+ "BFKDELE", "BFKLIST", "BFL", "BFLDELE", "BFLIST",
+ "BFLLIST", "BFSCALE", "BFTRAN", "BFUNIF", "BFV",
+ "BFVDELE", "BFVLIST", "BIOOPT", "BIOT", "BLC4", "BLC5",
+ "BLOCK", "BOOL", "BOPTN", "BSAX", "BSMD", "BSM1",
+ "BSM2", "BSPLIN", "BSS1", "BSS2", "BSTE", "BSTQ",
+ "BTOL", "BUCOPT", "C", "CALC", "CAMPBELL", "CBDOF",
+ "CBMD", "CBMX", "CBTE", "CBTMP", "CDOPT", "CDREAD",
+ "CDWRITE", "CE", "CECHECK", "CECMOD", "CECYC",
+ "CEDELE", "CEINTF", "CELIST", "CENTER", "CEQN",
+ "CERIG", "CESGEN", "CFACT", "*CFCLOS", "*CFOPEN",
+ "*CFWRITE", "/CFORMAT", "CGLOC", "CGOMGA", "CGROW",
+ "CHECK", "CHKMSH", "CINT", "CIRCLE", "CISOL",
+ "/CLABEL", "/CLEAR", "CLOCAL", "CLOG", "/CLOG",
+ "CLRMSHLN", "CM", "CMACEL", "/CMAP", "CMATRIX",
+ "CMDELE", "CMDOMEGA", "CMEDIT", "CMGRP", "CMLIST",
+ "CMMOD", "CMOMEGA", "CMPLOT", "CMROTATE", "CMSEL",
+ "CMSFILE", "CMSOPT", "CMWRITE", "CNCHECK", "CNKMOD",
+ "CNTR", "CNVTOL", "/COLOR", "/COM", "*COMP", "COMBINE",
+ "COMPRESS", "CON4", "CONE", "/CONFIG", "CONJUG",
+ "/CONTOUR", "/COPY", "CORIOLIS", "COUPLE", "COVAL",
+ "CP", "CPCYC", "CPDELE", "CPINTF", "/CPLANE", "CPLGEN",
+ "CPLIST", "CPMERGE", "CPNGEN", "CPSGEN", "CQC",
+ "*CREATE", "CRPLIM", "CS", "CSCIR", "CSDELE", "CSKP",
+ "CSLIST", "CSWPLA", "CSYS", "/CTYPE", "CURR2D",
+ "CUTCONTROL", "/CVAL", "CVAR", "/CWD", "CYCCALC",
+ "/CYCEXPAND", "CYCFILES", "CYCFREQ", "*CYCLE",
+ "CYCLIC", "CYCOPT", "CYCPHASE", "CYCSPEC", "CYL4",
+ "CYL5", "CYLIND", "CZDEL", "CZMESH", "D", "DA",
+ "DADELE", "DALIST", "DAMORPH", "DATA", "DATADEF",
+ "DCGOMG", "DCUM", "DCVSWP", "DDASPEC", "DDELE",
+ "DDOPTION", "DEACT", "DEFINE", "*DEL", "DELETE",
+ "/DELETE", "DELTIM", "DEMORPH", "DERIV", "DESIZE",
+ "DESOL", "DETAB", "/DEVDISP", "/DEVICE", "/DFLAB",
+ "DFLX", "DFSWAVE", "DIG", "DIGIT", "*DIM",
+ "/DIRECTORY", "DISPLAY", "/DIST", "DJ", "DJDELE",
+ "DJLIST", "DK", "DKDELE", "DKLIST", "DL", "DLDELE",
+ "DLIST", "DLLIST", "*DMAT", "DMOVE", "DMPEXT",
+ "DMPOPTION", "DMPRAT", "DMPSTR", "DNSOL", "*DO", "DOF",
+ "DOFSEL", "DOMEGA", "*DOT", "*DOWHILE", "DSCALE",
+ "/DSCALE", "DSET", "DSPOPTION", "DSUM", "DSURF",
+ "DSYM", "DSYS", "DTRAN", "DUMP", "/DV3D", "DVAL",
+ "DVMORPH", "DYNOPT", "E", "EALIVE", "EDADAPT", "EDALE",
+ "EDASMP", "EDBOUND", "EDBX", "EDBVIS", "EDCADAPT",
+ "EDCGEN", "EDCLIST", "EDCMORE", "EDCNSTR", "EDCONTACT",
+ "EDCPU", "EDCRB", "EDCSC", "EDCTS", "EDCURVE",
+ "EDDAMP", "EDDBL", "EDDC", "EDDRELAX", "EDDUMP",
+ "EDELE", "EDENERGY", "EDFPLOT", "EDGCALE", "/EDGE",
+ "EDHGLS", "EDHIST", "EDHTIME", "EDINT", "EDIPART",
+ "EDIS", "EDLCS", "EDLOAD", "EDMP", "EDNB", "EDNDTSD",
+ "EDNROT", "EDOPT", "EDOUT", "EDPART", "EDPC", "EDPL",
+ "EDPVEL", "EDRC", "EDRD", "EDREAD", "EDRI", "EDRST",
+ "EDRUN", "EDSHELL", "EDSOLV", "EDSP", "EDSTART",
+ "EDTERM", "EDTP", "EDVEL", "EDWELD", "EDWRITE",
+ "EEXTRUDE", "/EFACET", "EGEN", "*EIGEN", "EINFIN",
+ "EINTF", "EKILL", "ELBOW", "ELEM", "ELIST", "*ELSE",
+ "*ELSEIF", "EMAGERR", "EMATWRITE", "EMF", "EMFT",
+ "EMID", "EMIS", "EMODIF", "EMORE", "EMSYM", "EMTGEN",
+ "EMUNIT", "EN", "*END", "*ENDDO", "*ENDIF",
+ "ENDRELEASE", "ENERSOL", "ENGEN", "ENORM", "ENSYM",
+ "EORIENT", "EPLOT", "EQSLV", "ERASE", "/ERASE",
+ "EREAD", "EREFINE", "EREINF", "ERESX", "ERNORM",
+ "ERRANG", "ESCHECK", "ESEL", "/ESHAPE", "ESIZE",
+ "ESLA", "ESLL", "ESLN", "ESLV", "ESOL", "ESORT",
+ "ESSOLV", "ESTIF", "ESURF", "ESYM", "ESYS", "ET",
+ "ETABLE", "ETCHG", "ETCONTROL", "ETDELE", "ETLIST",
+ "ETYPE", "EUSORT", "EWRITE", "*EXIT", "/EXIT", "EXP",
+ "EXPAND", "/EXPAND", "EXPASS", "*EXPORT", "EXPROFILE",
+ "EXPSOL", "EXTOPT", "EXTREM", "EXUNIT", "F", "/FACET",
+ "FATIGUE", "FC", "FCCHECK", "FCDELE", "FCLIST", "FCUM",
+ "FCTYP", "FDELE", "/FDELE", "FE", "FEBODY", "FECONS",
+ "FEFOR", "FELIST", "FESURF", "*FFT", "FILE",
+ "FILEAUX2", "FILEAUX3", "FILEDISP", "FILL", "FILLDATA",
+ "/FILNAME", "FINISH", "FITEM", "FJ", "FJDELE",
+ "FJLIST", "FK", "FKDELE", "FKLIST", "FL", "FLIST",
+ "FLLIST", "FLST", "FLUXV", "FLUREAD", "FMAGBC",
+ "FMAGSUM", "/FOCUS", "FOR2D", "FORCE", "FORM",
+ "/FORMAT", "FP", "FPLIST", "*FREE", "FREQ", "FRQSCL",
+ "FS", "FSCALE", "FSDELE", "FSLIST", "FSNODE", "FSPLOT",
+ "FSSECT", "FSSPARM", "FSUM", "FTCALC", "FTRAN",
+ "FTSIZE", "FTWRITE", "FTYPE", "FVMESH", "GAP", "GAPF",
+ "GAUGE", "GCDEF", "GCGEN", "/GCMD", "/GCOLUMN",
+ "GENOPT", "GEOM", "GEOMETRY", "*GET", "/GFILE",
+ "/GFORMAT", "/GLINE", "/GMARKER", "GMATRIX", "GMFACE",
+ "*GO", "/GO", "/GOLIST", "/GOPR", "GP", "GPDELE",
+ "GPLIST", "GPLOT", "/GRAPHICS", "/GRESUME", "/GRID",
+ "/GROPT", "GRP", "/GRTYP", "/GSAVE", "GSBDATA",
+ "GSGDATA", "GSLIST", "GSSOL", "/GST", "GSUM", "/GTHK",
+ "/GTYPE", "HARFRQ", "/HBC", "HBMAT", "/HEADER", "HELP",
+ "HELPDISP", "HEMIOPT", "HFANG", "HFSYM", "HMAGSOLV",
+ "HPGL", "HPTCREATE", "HPTDELETE", "HRCPLX", "HREXP",
+ "HROPT", "HROCEAN", "HROUT", "IC", "ICDELE", "ICLIST",
+ "/ICLWID", "/ICSCALE", "*IF", "IGESIN", "IGESOUT",
+ "/IMAGE", "IMAGIN", "IMESH", "IMMED", "IMPD",
+ "INISTATE", "*INIT", "/INPUT", "/INQUIRE", "INRES",
+ "INRTIA", "INT1", "INTSRF", "IOPTN", "IRLF", "IRLIST",
+ "*ITENGINE", "JPEG", "JSOL", "K", "KATT", "KBC",
+ "KBETW", "KCALC", "KCENTER", "KCLEAR", "KDELE",
+ "KDIST", "KEEP", "KESIZE", "KEYOPT", "KEYPTS", "KEYW",
+ "KFILL", "KGEN", "KL", "KLIST", "KMESH", "KMODIF",
+ "KMOVE", "KNODE", "KPLOT", "KPSCALE", "KREFINE",
+ "KSCALE", "KSCON", "KSEL", "KSLL", "KSLN", "KSUM",
+ "KSYMM", "KTRAN", "KUSE", "KWPAVE", "KWPLAN", "L",
+ "L2ANG", "L2TAN", "LANG", "LARC", "/LARC", "LAREA",
+ "LARGE", "LATT", "LAYER", "LAYERP26", "LAYLIST",
+ "LAYPLOT", "LCABS", "LCASE", "LCCALC", "LCCAT",
+ "LCDEF", "LCFACT", "LCFILE", "LCLEAR", "LCOMB",
+ "LCOPER", "LCSEL", "LCSL", "LCSUM", "LCWRITE",
+ "LCZERO", "LDELE", "LDIV", "LDRAG", "LDREAD", "LESIZE",
+ "LEXTND", "LFILLT", "LFSURF", "LGEN", "LGLUE",
+ "LGWRITE", "/LIGHT", "LINA", "LINE", "/LINE", "LINES",
+ "LINL", "LINP", "LINV", "LIST", "*LIST", "LLIST",
+ "LMATRIX", "LMESH", "LNSRCH", "LOCAL", "LOVLAP",
+ "LPLOT", "LPTN", "LREFINE", "LREVERSE", "LROTAT",
+ "LSBA", "*LSBAC", "LSBL", "LSBV", "LSBW", "LSCLEAR",
+ "LSDELE", "*LSDUMP", "LSEL", "*LSENGINE", "*LSFACTOR",
+ "LSLA", "LSLK", "LSOPER", "/LSPEC", "LSREAD",
+ "*LSRESTORE", "LSSCALE", "LSSOLVE", "LSTR", "LSUM",
+ "LSWRITE", "/LSYMBOL", "LSYMM", "LTAN", "LTRAN",
+ "LUMPM", "LVSCALE", "LWPLAN", "M", "MADAPT", "MAGOPT",
+ "MAGSOLV", "/MAIL", "MAP", "/MAP", "MAP2DTO3D",
+ "MAPSOLVE", "MAPVAR", "MASTER", "MAT", "MATER",
+ "MCHECK", "MDAMP", "MDELE", "MDPLOT", "MEMM", "/MENU",
+ "MESHING", "MFANALYSIS", "MFBUCKET", "MFCALC", "MFCI",
+ "MFCLEAR", "MFCMMAND", "MFCONV", "MFDTIME", "MFELEM",
+ "MFEM", "MFEXTER", "MFFNAME", "MFFR", "MFIMPORT",
+ "MFINTER", "MFITER", "MFLCOMM", "MFLIST", "MFMAP",
+ "MFORDER", "MFOUTPUT", "*MFOURI", "MFPSIMUL", "MFRC",
+ "MFRELAX", "MFRSTART", "MFSORDER", "MFSURFACE",
+ "MFTIME", "MFTOL", "*MFUN", "MFVOLUME", "MFWRITE",
+ "MGEN", "MIDTOL", "/MKDIR", "MLIST", "MMASS", "MMF",
+ "MODCONT", "MODE", "MODIFY", "MODMSH", "MODSELOPTION",
+ "MODOPT", "MONITOR", "*MOPER", "MOPT", "MORPH", "MOVE",
+ "MP", "MPAMOD", "MPCHG", "MPCOPY", "MPDATA", "MPDELE",
+ "MPDRES", "/MPLIB", "MPLIST", "MPPLOT", "MPREAD",
+ "MPRINT", "MPTEMP", "MPTGEN", "MPTRES", "MPWRITE",
+ "/MREP", "MSAVE", "*MSG", "MSHAPE", "MSHCOPY",
+ "MSHKEY", "MSHMID", "MSHPATTERN", "MSOLVE", "/MSTART",
+ "MSTOLE", "*MULT", "*MWRITE", "MXPAND", "N", "NANG",
+ "NAXIS", "NCNV", "NDELE", "NDIST", "NDSURF", "NEQIT",
+ "/NERR", "NFORCE", "NGEN", "NKPT", "NLADAPTIVE",
+ "NLDIAG", "NLDPOST", "NLGEOM", "NLHIST", "NLIST",
+ "NLMESH", "NLOG", "NLOPT", "NMODIF", "NOCOLOR",
+ "NODES", "/NOERASE", "/NOLIST", "NOOFFSET", "NOORDER",
+ "/NOPR", "NORA", "NORL", "/NORMAL", "NPLOT", "NPRINT",
+ "NREAD", "NREFINE", "NRLSUM", "*NRM", "NROPT",
+ "NROTAT", "NRRANG", "NSCALE", "NSEL", "NSLA", "NSLE",
+ "NSLK", "NSLL", "NSLV", "NSMOOTH", "NSOL", "NSORT",
+ "NSTORE", "NSUBST", "NSVR", "NSYM", "/NUMBER",
+ "NUMCMP", "NUMEXP", "NUMMRG", "NUMOFF", "NUMSTR",
+ "NUMVAR", "NUSORT", "NWPAVE", "NWPLAN", "NWRITE",
+ "OCDATA", "OCDELETE", "OCLIST", "OCREAD", "OCTABLE",
+ "OCTYPE", "OCZONE", "OMEGA", "OPERATE", "OPNCONTROL",
+ "OUTAERO", "OUTOPT", "OUTPR", "/OUTPUT", "OUTRES",
+ "OVCHECK", "PADELE", "/PAGE", "PAGET", "PAPUT",
+ "PARESU", "PARTSEL", "PARRES", "PARSAV", "PASAVE",
+ "PATH", "PAUSE", "/PBC", "/PBF", "PCALC", "PCGOPT",
+ "PCIRC", "/PCIRCLE", "/PCOPY", "PCROSS", "PDANL",
+ "PDCDF", "PDCFLD", "PDCLR", "PDCMAT", "PDCORR",
+ "PDDMCS", "PDDOEL", "PDEF", "PDEXE", "PDHIST",
+ "PDINQR", "PDLHS", "PDMETH", "PDOT", "PDPINV",
+ "PDPLOT", "PDPROB", "PDRESU", "PDROPT", "/PDS",
+ "PDSAVE", "PDSCAT", "PDSENS", "PDSHIS", "PDUSER",
+ "PDVAR", "PDWRITE", "PERBC2D", "PERTURB", "PFACT",
+ "PHYSICS", "PIVCHECK", "PLCAMP", "PLCFREQ", "PLCHIST",
+ "PLCINT", "PLCPLX", "PLCRACK", "PLDISP", "PLESOL",
+ "PLETAB", "PLFAR", "PLF2D", "PLGEOM", "PLLS", "PLMAP",
+ "PLMC", "PLNEAR", "PLNSOL", "/PLOPTS", "PLORB", "PLOT",
+ "PLOTTING", "PLPAGM", "PLPATH", "PLSECT", "PLST",
+ "PLTIME", "PLTRAC", "PLVAR", "PLVECT", "PLZZ",
+ "/PMACRO", "PMAP", "PMGTRAN", "PMLOPT", "PMLSIZE",
+ "/PMORE", "PNGR", "/PNUM", "POINT", "POLY", "/POLYGON",
+ "/POST1", "/POST26", "POWERH", "PPATH", "PRANGE",
+ "PRAS", "PRCAMP", "PRCINT", "PRCPLX", "PRED",
+ "PRENERGY", "/PREP7", "PRERR", "PRESOL", "PRETAB",
+ "PRFAR", "PRI2", "PRIM", "PRINT", "*PRINT", "PRISM",
+ "PRITER", "PRJSOL", "PRNEAR", "PRNLD", "PRNSOL",
+ "PROD", "PRORB", "PRPATH", "PRRFOR", "PRRSOL",
+ "PRSCONTROL", "PRSECT", "PRTIME", "PRVAR", "PRVECT",
+ "PSCONTROL", "PSCR", "PSDCOM", "PSDFRQ", "PSDGRAPH",
+ "PSDRES", "PSDSPL", "PSDUNIT", "PSDVAL", "PSDWAV",
+ "/PSEARCH", "PSEL", "/PSF", "PSMAT", "PSMESH",
+ "/PSPEC", "/PSTATUS", "PSTRES", "/PSYMB", "PTR",
+ "PTXY", "PVECT", "/PWEDGE", "QDVAL", "QRDOPT", "QSOPT",
+ "QUAD", "/QUIT", "QUOT", "R", "RACE", "RADOPT",
+ "RAPPND", "RATE", "/RATIO", "RBE3", "RCON", "RCYC",
+ "RDEC", "RDELE", "READ", "REAL", "REALVAR", "RECTNG",
+ "REMESH", "/RENAME", "REORDER", "*REPEAT", "/REPLOT",
+ "RESCOMBINE", "RESCONTROL", "RESET", "/RESET", "RESP",
+ "RESUME", "RESVEC", "RESWRITE", "*RETURN", "REXPORT",
+ "REZONE", "RFORCE", "/RGB", "RIGID", "RIGRESP",
+ "RIMPORT", "RLIST", "RMALIST", "RMANL", "RMASTER",
+ "RMCAP", "RMCLIST", "/RMDIR", "RMFLVEC", "RMLVSCALE",
+ "RMMLIST", "RMMRANGE", "RMMSELECT", "RMNDISP",
+ "RMNEVEC", "RMODIF", "RMORE", "RMPORDER", "RMRESUME",
+ "RMRGENERATE", "RMROPTIONS", "RMRPLOT", "RMRSTATUS",
+ "RMSAVE", "RMSMPLE", "RMUSE", "RMXPORT", "ROCK",
+ "ROSE", "RPOLY", "RPR4", "RPRISM", "RPSD", "RSFIT",
+ "RSOPT", "RSPLIT", "RSPLOT", "RSPRNT", "RSSIMS",
+ "RSTMAC", "RSTOFF", "RSURF", "RSYMM", "RSYS", "RTHICK",
+ "SABS", "SADD", "SALLOW", "SAVE", "SBCLIST", "SBCTRAN",
+ "SDELETE", "SE", "SECCONTROL", "SECDATA",
+ "SECFUNCTION", "SECJOINT", "/SECLIB", "SECLOCK",
+ "SECMODIF", "SECNUM", "SECOFFSET", "SECPLOT",
+ "SECREAD", "SECSTOP", "SECTYPE", "SECWRITE", "SED",
+ "SEDLIST", "SEEXP", "/SEG", "SEGEN", "SELIST", "SELM",
+ "SELTOL", "SENERGY", "SEOPT", "SESYMM", "*SET", "SET",
+ "SETFGAP", "SETRAN", "SEXP", "SF", "SFA", "SFACT",
+ "SFADELE", "SFALIST", "SFBEAM", "SFCALC", "SFCUM",
+ "SFDELE", "SFE", "SFEDELE", "SFELIST", "SFFUN",
+ "SFGRAD", "SFL", "SFLDELE", "SFLEX", "SFLIST",
+ "SFLLIST", "SFSCALE", "SFTRAN", "/SHADE", "SHELL",
+ "/SHOW", "/SHOWDISP", "SHPP", "/SHRINK", "SLIST",
+ "SLOAD", "SMALL", "*SMAT", "SMAX", "/SMBC", "SMBODY",
+ "SMCONS", "SMFOR", "SMIN", "SMOOTH", "SMRTSIZE",
+ "SMSURF", "SMULT", "SNOPTION", "SOLU", "/SOLU",
+ "SOLUOPT", "SOLVE", "SORT", "SOURCE", "SPACE",
+ "SPCNOD", "SPCTEMP", "SPDAMP", "SPEC", "SPFREQ",
+ "SPGRAPH", "SPH4", "SPH5", "SPHERE", "SPLINE", "SPLOT",
+ "SPMWRITE", "SPOINT", "SPOPT", "SPREAD", "SPTOPT",
+ "SPOWER", "SPUNIT", "SPVAL", "SQRT", "*SREAD", "SRSS",
+ "SSBT", "/SSCALE", "SSLN", "SSMT", "SSPA", "SSPB",
+ "SSPD", "SSPE", "SSPM", "SSUM", "SSTATE", "STABILIZE",
+ "STAOPT", "STAT", "*STATUS", "/STATUS", "STEF",
+ "/STITLE", "STORE", "SUBOPT", "SUBSET", "SUCALC",
+ "SUCR", "SUDEL", "SUEVAL", "SUGET", "SUMAP", "SUMTYPE",
+ "SUPL", "SUPR", "SURESU", "SUSAVE", "SUSEL", "SUVECT",
+ "SV", "SVPLOT", "SVTYP", "SWADD", "SWDEL", "SWGEN",
+ "SWLIST", "SYNCHRO", "/SYP", "/SYS", "TALLOW",
+ "TARGET", "*TAXIS", "TB", "TBCOPY", "TBDATA", "TBDELE",
+ "TBEO", "TBIN", "TBFIELD", "TBFT", "TBLE", "TBLIST",
+ "TBMODIF", "TBPLOT", "TBPT", "TBTEMP", "TCHG", "/TEE",
+ "TERM", "THEXPAND", "THOPT", "TIFF", "TIME",
+ "TIMERANGE", "TIMINT", "TIMP", "TINTP", "/TITLE",
+ "/TLABEL", "TOFFST", "*TOPER", "TORQ2D", "TORQC2D",
+ "TORQSUM", "TORUS", "TRANS", "TRANSFER", "*TREAD",
+ "TREF", "/TRIAD", "/TRLCY", "TRNOPT", "TRPDEL",
+ "TRPLIS", "TRPOIN", "TRTIME", "TSHAP", "/TSPEC",
+ "TSRES", "TUNIF", "TVAR", "/TXTRE", "/TYPE", "TYPE",
+ "/UCMD", "/UDOC", "/UI", "UIMP", "/UIS", "*ULIB",
+ "UNDELETE", "UNDO", "/UNITS", "UNPAUSE", "UPCOORD",
+ "UPGEOM", "*USE", "/USER", "USRCAL", "USRDOF",
+ "USRELEM", "V", "V2DOPT", "VA", "*VABS", "VADD",
+ "VARDEL", "VARNAM", "VATT", "VCLEAR", "*VCOL",
+ "/VCONE", "VCROSS", "*VCUM", "VDDAM", "VDELE", "VDGL",
+ "VDOT", "VDRAG", "*VEC", "*VEDIT", "VEORIENT", "VEXT",
+ "*VFACT", "*VFILL", "VFOPT", "VFQUERY", "VFSM",
+ "*VFUN", "VGEN", "*VGET", "VGET", "VGLUE", "/VIEW",
+ "VIMP", "VINP", "VINV", "*VITRP", "*VLEN", "VLIST",
+ "VLSCALE", "*VMASK", "VMESH", "VOFFST", "VOLUMES")
+
+ # list of in-built () functions
+ elafunf = ("NX()", "NY()", "NZ()", "KX()", "KY()", "KZ()", "LX()",
+ "LY()", "LZ()", "LSX()", "LSY()", "LSZ()", "NODE()",
+ "KP()", "DISTND()", "DISTKP()", "DISTEN()", "ANGLEN()",
+ "ANGLEK()", "NNEAR()", "KNEAR()", "ENEARN()",
+ "AREAND()", "AREAKP()", "ARNODE()", "NORMNX()",
+ "NORMNY()", "NORMNZ()", "NORMKX()", "NORMKY()",
+ "NORMKZ()", "ENEXTN()", "NELEM()", "NODEDOF()",
+ "ELADJ()", "NDFACE()", "NMFACE()", "ARFACE()", "UX()",
+ "UY()", "UZ()", "ROTX()", "ROTY()", "ROTZ()", "TEMP()",
+ "PRES()", "VX()", "VY()", "VZ()", "ENKE()", "ENDS()",
+ "VOLT()", "MAG()", "AX()", "AY()", "AZ()",
+ "VIRTINQR()", "KWGET()", "VALCHR()", "VALHEX()",
+ "CHRHEX()", "STRFILL()", "STRCOMP()", "STRPOS()",
+ "STRLENG()", "UPCASE()", "LWCASE()", "JOIN()",
+ "SPLIT()", "ABS()", "SIGN()", "CXABS()", "EXP()",
+ "LOG()", "LOG10()", "SQRT()", "NINT()", "MOD()",
+ "RAND()", "GDIS()", "SIN()", "COS()", "TAN()",
+ "SINH()", "COSH()", "TANH()", "ASIN()", "ACOS()",
+ "ATAN()", "ATAN2()")
+
+ elafung = ("NSEL()", "ESEL()", "KSEL()", "LSEL()", "ASEL()",
+ "VSEL()", "NDNEXT()", "ELNEXT()", "KPNEXT()",
+ "LSNEXT()", "ARNEXT()", "VLNEXT()", "CENTRX()",
+ "CENTRY()", "CENTRZ()")
+
+ elafunh = ("~CAT5IN", "~CATIAIN", "~PARAIN", "~PROEIN", "~SATIN",
+ "~UGIN", "A", "AADD", "AATT", "ABEXTRACT", "*ABBR",
+ "ABBRES", "ABBSAV", "ABS", "ACCAT", "ACCOPTION",
+ "ACEL", "ACLEAR", "ADAMS", "ADAPT", "ADD", "ADDAM",
+ "ADELE", "ADGL", "ADRAG", "AESIZE", "AFILLT", "AFLIST",
+ "AFSURF", "*AFUN", "AGEN", "AGLUE", "AINA", "AINP",
+ "AINV", "AL", "ALIST", "ALLSEL", "ALPHAD", "AMAP",
+ "AMESH", "/AN3D", "ANCNTR", "ANCUT", "ANCYC", "ANDATA",
+ "ANDSCL", "ANDYNA", "/ANFILE", "ANFLOW", "/ANGLE",
+ "ANHARM", "ANIM", "ANISOS", "ANMODE", "ANMRES",
+ "/ANNOT", "ANORM", "ANPRES", "ANSOL", "ANSTOAQWA",
+ "ANSTOASAS", "ANTIME", "ANTYPE")
+
+ tokens = {
+ 'root': [
+ (r'!.*\n', Comment),
+ include('strings'),
+ include('core'),
+ include('nums'),
+ (words((elafunb+elafunc+elafund+elafune+elafunh), suffix=r'\b'), Keyword),
+ (words((elafunf+elafung), suffix=r'\b'), Name.Builtin),
+ (r'AR[0-9]+', Name.Variable.Instance),
+ (r'[a-z][a-z0-9_]*', Name.Variable),
(r'[\s]+', Whitespace),
- ],
- 'core': [
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
- (r'/EOF', Generic.Emph),
- (r'[(),:&;]', Punctuation),
- ],
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'[$%]', String.Symbol),
- ],
- 'nums': [
- (r'\d+(?![.ef])', Number.Integer),
- (r'[+-]?\d*\.?\d+([ef][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.?\d*([ef][-+]?\d+)?', Number.Float),
- ]
- }
+ ],
+ 'core': [
+ # Operators
+ (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
+ (r'/EOF', Generic.Emph),
+ (r'[(),:&;]', Punctuation),
+ ],
+ 'strings': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'[$%]', String.Symbol),
+ ],
+ 'nums': [
+ (r'\d+(?![.ef])', Number.Integer),
+ (r'[+-]?\d*\.?\d+([ef][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.?\d*([ef][-+]?\d+)?', Number.Float),
+ ]
+ }
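
The rules above feed the keyword tuples (elafunb through elafunh) into Pygments' words() helper, which collapses a tuple of literal command names into one optimized alternation before the RegexLexer machinery compiles it. A minimal sketch of that pattern, using a hypothetical cut-down lexer and a few of the commands listed above (the class name and sample input are illustrative, not taken from the diff):

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Whitespace

    class MiniApdlLikeLexer(RegexLexer):
        """Hypothetical cut-down lexer: keyword tuple -> words() -> one rule."""
        tokens = {
            'root': [
                # words() builds a single regex roughly like (?:FINISH|SOLVE|/PREP7)\b
                (words(("FINISH", "/PREP7", "SOLVE"), suffix=r'\b'), Keyword),
                (r'[a-z][a-z0-9_]*', Name.Variable),
                (r'\s+', Whitespace),
            ]
        }

    for tok, val in MiniApdlLikeLexer().get_tokens("SOLVE\nmyvar\n"):
        print(tok, repr(val))

get_tokens() is the public entry point; the real root state above additionally routes AR-numbered parameters to Name.Variable.Instance and the ()-style functions to Name.Builtin.
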
diff --git a/contrib/python/Pygments/py3/pygments/lexers/apl.py b/contrib/python/Pygments/py3/pygments/lexers/apl.py
index ab6bbe51df..4de74841c6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/apl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/apl.py
@@ -4,7 +4,7 @@
Lexers for APL.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,16 +17,16 @@ __all__ = ['APLLexer']
class APLLexer(RegexLexer):
"""
- A simple `APL <https://en.m.wikipedia.org/wiki/APL_(programming_language)>`_ lexer.
+ A simple `APL <https://en.m.wikipedia.org/wiki/APL_(programming_language)>`_ lexer.
.. versionadded:: 2.0
"""
name = 'APL'
aliases = ['apl']
- filenames = [
- '*.apl', '*.aplf', '*.aplo', '*.apln',
- '*.aplc', '*.apli', '*.dyalog',
- ]
+ filenames = [
+ '*.apl', '*.aplf', '*.aplo', '*.apln',
+ '*.aplc', '*.apli', '*.dyalog',
+ ]
tokens = {
'root': [
@@ -37,7 +37,7 @@ class APLLexer(RegexLexer):
# Comment
# =======
# '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
- (r'[⍝#].*$', Comment.Single),
+ (r'[⍝#].*$', Comment.Single),
#
# Strings
# =======
@@ -48,7 +48,7 @@ class APLLexer(RegexLexer):
# ===========
# This token type is used for diamond and parenthesis
# but not for bracket and ; (see below)
- (r'[⋄◇()]', Punctuation),
+ (r'[⋄◇()]', Punctuation),
#
# Array indexing
# ==============
@@ -59,45 +59,45 @@ class APLLexer(RegexLexer):
# Distinguished names
# ===================
# following IBM APL2 standard
- (r'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
+ (r'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
#
# Labels
# ======
# following IBM APL2 standard
- # (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
+ # (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
#
# Variables
# =========
- # following IBM APL2 standard (with a leading _ ok for GNU APL and Dyalog)
- (r'[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
+ # following IBM APL2 standard (with a leading _ ok for GNU APL and Dyalog)
+ (r'[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
#
# Numbers
# =======
- (r'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
- r'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
+ (r'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
+ r'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
Number),
#
# Operators
# ==========
- (r'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘⌸&⌶@⌺⍥⍛⍢]', Name.Attribute), # closest token type
- (r'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⊇⍸√⌾…⍮]',
+ (r'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘⌸&⌶@⌺⍥⍛⍢]', Name.Attribute), # closest token type
+ (r'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⊇⍸√⌾…⍮]',
Operator),
#
# Constant
# ========
- (r'⍬', Name.Constant),
+ (r'⍬', Name.Constant),
#
# Quad symbol
# ===========
- (r'[⎕⍞]', Name.Variable.Global),
+ (r'[⎕⍞]', Name.Variable.Global),
#
# Arrows left/right
# =================
- (r'[←→]', Keyword.Declaration),
+ (r'[←→]', Keyword.Declaration),
#
# D-Fn
# ====
- (r'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
+ (r'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
(r'[{}]', Keyword.Type),
],
}
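
Since the APL lexer's alias ('apl') and its token rules are shown in full above, a short usage sketch is straightforward; the APL snippet itself is just an illustrative dfn, not taken from the diff:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    code = "avg ← {(+/⍵)÷≢⍵}  ⍝ arithmetic mean of a vector"
    # '←' hits Keyword.Declaration, '⍵' Name.Builtin.Pseudo, '⍝ ...' Comment.Single
    print(highlight(code, get_lexer_by_name('apl'), TerminalFormatter()))
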
diff --git a/contrib/python/Pygments/py3/pygments/lexers/archetype.py b/contrib/python/Pygments/py3/pygments/lexers/archetype.py
index 4dfd6672fa..bf991ffab6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/archetype.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/archetype.py
@@ -13,7 +13,7 @@
Contributed by Thomas Beale <https://github.com/wolandscat>,
<https://bitbucket.org/thomas_beale>.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,7 +57,7 @@ class AtomsLexer(RegexLexer):
(r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
(r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'[+-]?\d*\.\d+%?', Number.Float),
+ (r'[+-]?\d*\.\d+%?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[+-]?\d+%?', Number.Integer),
],
@@ -211,9 +211,9 @@ class CadlLexer(AtomsLexer):
(r'(not)\W', Operator),
(r'(matches|is_in)\W', Operator),
# is_in / not is_in char
- ('(\u2208|\u2209)', Operator),
+ ('(\u2208|\u2209)', Operator),
# there_exists / not there_exists / for_all / and / or
- ('(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\223C)',
+ ('(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\223C)',
Operator),
# regex in slot or as string constraint
(r'(\{)(\s*)(/[^}]+/)(\s*)(\})',
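
The cADL hunks above only reshuffle whitespace, but the rules they touch are easy to misread because they mix raw-string numeric patterns with plain-string Unicode escapes. A small standalone check of what those two pattern flavours accept (pattern strings copied from the rules above; the sample inputs are made up):

    import re

    is_in = re.compile('(\u2208|\u2209)')           # the is_in / not-is_in rule: ∈ or ∉
    percent_float = re.compile(r'[+-]?\d*\.\d+%?')  # the Number.Float rule

    print(is_in.findall('x ∈ S, y ∉ T'))            # ['∈', '∉']
    print(percent_float.findall('-0.5 and 12.75%')) # ['-0.5', '12.75%']
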
diff --git a/contrib/python/Pygments/py3/pygments/lexers/arrow.py b/contrib/python/Pygments/py3/pygments/lexers/arrow.py
index 20c9e9b5fb..ef7a1a5beb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/arrow.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/arrow.py
@@ -1,116 +1,116 @@
-"""
- pygments.lexers.arrow
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Arrow.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, default, include
-from pygments.token import Text, Operator, Keyword, Punctuation, Name, \
+"""
+ pygments.lexers.arrow
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Arrow.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, default, include
+from pygments.token import Text, Operator, Keyword, Punctuation, Name, \
String, Number, Whitespace
-
-__all__ = ['ArrowLexer']
-
-TYPES = r'\b(int|bool|char)((?:\[\])*)(?=\s+)'
-IDENT = r'([a-zA-Z_][a-zA-Z0-9_]*)'
-DECL = TYPES + r'(\s+)' + IDENT
-
-
-class ArrowLexer(RegexLexer):
- """
- Lexer for Arrow: https://pypi.org/project/py-arrow-lang/
-
- .. versionadded:: 2.7
- """
-
- name = 'Arrow'
- aliases = ['arrow']
- filenames = ['*.arw']
-
- tokens = {
- 'root': [
+
+__all__ = ['ArrowLexer']
+
+TYPES = r'\b(int|bool|char)((?:\[\])*)(?=\s+)'
+IDENT = r'([a-zA-Z_][a-zA-Z0-9_]*)'
+DECL = TYPES + r'(\s+)' + IDENT
+
+
+class ArrowLexer(RegexLexer):
+ """
+ Lexer for Arrow: https://pypi.org/project/py-arrow-lang/
+
+ .. versionadded:: 2.7
+ """
+
+ name = 'Arrow'
+ aliases = ['arrow']
+ filenames = ['*.arw']
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'^[|\s]+', Punctuation),
- include('blocks'),
- include('statements'),
- include('expressions'),
- ],
- 'blocks': [
- (r'(function)(\n+)(/-->)(\s*)' +
- DECL + # 4 groups
- r'(\()', bygroups(
+ (r'^[|\s]+', Punctuation),
+ include('blocks'),
+ include('statements'),
+ include('expressions'),
+ ],
+ 'blocks': [
+ (r'(function)(\n+)(/-->)(\s*)' +
+ DECL + # 4 groups
+ r'(\()', bygroups(
Keyword.Reserved, Whitespace, Punctuation,
Whitespace, Keyword.Type, Punctuation, Whitespace,
- Name.Function, Punctuation
- ), 'fparams'),
- (r'/-->$|\\-->$|/--<|\\--<|\^', Punctuation),
- ],
- 'statements': [
- (DECL, bygroups(Keyword.Type, Punctuation, Text, Name.Variable)),
- (r'\[', Punctuation, 'index'),
- (r'=', Operator),
- (r'require|main', Keyword.Reserved),
- (r'print', Keyword.Reserved, 'print'),
- ],
- 'expressions': [
+ Name.Function, Punctuation
+ ), 'fparams'),
+ (r'/-->$|\\-->$|/--<|\\--<|\^', Punctuation),
+ ],
+ 'statements': [
+ (DECL, bygroups(Keyword.Type, Punctuation, Text, Name.Variable)),
+ (r'\[', Punctuation, 'index'),
+ (r'=', Operator),
+ (r'require|main', Keyword.Reserved),
+ (r'print', Keyword.Reserved, 'print'),
+ ],
+ 'expressions': [
(r'\s+', Whitespace),
- (r'[0-9]+', Number.Integer),
- (r'true|false', Keyword.Constant),
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'\{', Punctuation, 'array'),
- (r'==|!=|<|>|\+|-|\*|/|%', Operator),
- (r'and|or|not|length', Operator.Word),
- (r'(input)(\s+)(int|char\[\])', bygroups(
+ (r'[0-9]+', Number.Integer),
+ (r'true|false', Keyword.Constant),
+ (r"'", String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ (r'\{', Punctuation, 'array'),
+ (r'==|!=|<|>|\+|-|\*|/|%', Operator),
+ (r'and|or|not|length', Operator.Word),
+ (r'(input)(\s+)(int|char\[\])', bygroups(
Keyword.Reserved, Whitespace, Keyword.Type
- )),
- (IDENT + r'(\()', bygroups(
- Name.Function, Punctuation
- ), 'fargs'),
- (IDENT, Name.Variable),
- (r'\[', Punctuation, 'index'),
- (r'\(', Punctuation, 'expressions'),
- (r'\)', Punctuation, '#pop'),
- ],
- 'print': [
- include('expressions'),
- (r',', Punctuation),
- default('#pop'),
- ],
- 'fparams': [
+ )),
+ (IDENT + r'(\()', bygroups(
+ Name.Function, Punctuation
+ ), 'fargs'),
+ (IDENT, Name.Variable),
+ (r'\[', Punctuation, 'index'),
+ (r'\(', Punctuation, 'expressions'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'print': [
+ include('expressions'),
+ (r',', Punctuation),
+ default('#pop'),
+ ],
+ 'fparams': [
(DECL, bygroups(Keyword.Type, Punctuation, Whitespace, Name.Variable)),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'escape': [
- (r'\\(["\\/abfnrtv]|[0-9]{1,3}|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4})',
- String.Escape),
- ],
- 'char': [
- (r"'", String.Char, '#pop'),
- include('escape'),
- (r"[^'\\]", String.Char),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- include('escape'),
- (r'[^"\\]+', String.Double),
- ],
- 'array': [
- include('expressions'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation),
- ],
- 'fargs': [
- include('expressions'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- ],
- 'index': [
- include('expressions'),
- (r'\]', Punctuation, '#pop'),
- ],
- }
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'escape': [
+ (r'\\(["\\/abfnrtv]|[0-9]{1,3}|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4})',
+ String.Escape),
+ ],
+ 'char': [
+ (r"'", String.Char, '#pop'),
+ include('escape'),
+ (r"[^'\\]", String.Char),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ include('escape'),
+ (r'[^"\\]+', String.Double),
+ ],
+ 'array': [
+ include('expressions'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation),
+ ],
+ 'fargs': [
+ include('expressions'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ ],
+ 'index': [
+ include('expressions'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+ }
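
The Arrow lexer above declares the 'arrow' alias and '*.arw' filenames, so it can be fetched by name. A brief sketch of driving it directly; the Arrow-ish snippet is invented purely to exercise the 'statements' and 'expressions' states shown above:

    from pygments.lexers import get_lexer_by_name

    src = "int n = 13\nprint n, true\n"
    for tok, val in get_lexer_by_name('arrow').get_tokens(src):
        if val.strip():                 # skip pure-whitespace tokens
            print(tok, repr(val))
    # 'int n' goes through DECL (Keyword.Type / Name.Variable),
    # '=' is Operator, and 'print' pushes the 'print' state.
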
diff --git a/contrib/python/Pygments/py3/pygments/lexers/asc.py b/contrib/python/Pygments/py3/pygments/lexers/asc.py
index b19f429f4c..c91d5f27d2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/asc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/asc.py
@@ -1,51 +1,51 @@
-"""
- pygments.lexers.asc
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for various ASCII armored files.
-
+"""
+ pygments.lexers.asc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for various ASCII armored files.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Generic, Name, Operator, String, Whitespace
-
-__all__ = ['AscLexer']
-
-
-class AscLexer(RegexLexer):
- """
- Lexer for ASCII armored files, containing `-----BEGIN/END ...-----` wrapped base64 data.
-
- .. versionadded:: 2.10
- """
- name = 'ASCII armored'
- aliases = ['asc', 'pem']
- filenames = [
- '*.asc', # PGP; *.gpg, *.pgp, and *.sig too, but those can be binary
- '*.pem', # X.509; *.cer, *.crt, *.csr, and key etc too, but those can be binary
- 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa', # SSH private keys
- ]
- mimetypes = ['application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature']
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'^-----BEGIN [^\n]+-----$', Generic.Heading, 'data'),
- (r'\S+', Comment),
- ],
- 'data': [
- (r'\s+', Whitespace),
- (r'^([^:]+)(:)([ \t]+)(.*)', bygroups(Name.Attribute, Operator, Whitespace, String)),
- (r'^-----END [^\n]+-----$', Generic.Heading, 'root'),
- (r'\S+', String),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'^-----BEGIN [^\n]+-----\r?\n', text):
- return True
+ :license: BSD, see LICENSE for details.
+"""
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Generic, Name, Operator, String, Whitespace
+
+__all__ = ['AscLexer']
+
+
+class AscLexer(RegexLexer):
+ """
+ Lexer for ASCII armored files, containing `-----BEGIN/END ...-----` wrapped base64 data.
+
+ .. versionadded:: 2.10
+ """
+ name = 'ASCII armored'
+ aliases = ['asc', 'pem']
+ filenames = [
+ '*.asc', # PGP; *.gpg, *.pgp, and *.sig too, but those can be binary
+ '*.pem', # X.509; *.cer, *.crt, *.csr, and key etc too, but those can be binary
+ 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa', # SSH private keys
+ ]
+ mimetypes = ['application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature']
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'^-----BEGIN [^\n]+-----$', Generic.Heading, 'data'),
+ (r'\S+', Comment),
+ ],
+ 'data': [
+ (r'\s+', Whitespace),
+ (r'^([^:]+)(:)([ \t]+)(.*)', bygroups(Name.Attribute, Operator, Whitespace, String)),
+ (r'^-----END [^\n]+-----$', Generic.Heading, 'root'),
+ (r'\S+', String),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'^-----BEGIN [^\n]+-----\r?\n', text):
+ return True
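
Because AscLexer.analyse_text() above returns True for any text opening with a -----BEGIN ...----- armor header, guess_lexer() will normally pick this lexer for PEM-style input. A minimal sketch with a truncated, made-up certificate body:

    from pygments.lexers import guess_lexer

    pem = (
        "-----BEGIN CERTIFICATE-----\n"
        "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\n"   # made-up base64 payload
        "-----END CERTIFICATE-----\n"
    )
    print(guess_lexer(pem).name)   # expected: 'ASCII armored'
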
diff --git a/contrib/python/Pygments/py3/pygments/lexers/asm.py b/contrib/python/Pygments/py3/pygments/lexers/asm.py
index e5f795f4f3..c059c18e15 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/asm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/asm.py
@@ -4,23 +4,23 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, words, \
- DelegatingLexer, default
+ DelegatingLexer, default
from pygments.lexers.c_cpp import CppLexer, CLexer
from pygments.lexers.d import DLexer
from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
Other, Keyword, Operator, Whitespace
__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
- 'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'LlvmMirBodyLexer',
- 'LlvmMirLexer', 'NasmLexer', 'NasmObjdumpLexer', 'TasmLexer',
- 'Ca65Lexer', 'Dasm16Lexer']
+ 'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'LlvmMirBodyLexer',
+ 'LlvmMirLexer', 'NasmLexer', 'NasmObjdumpLexer', 'TasmLexer',
+ 'Ca65Lexer', 'Dasm16Lexer']
class GasLexer(RegexLexer):
@@ -36,7 +36,7 @@ class GasLexer(RegexLexer):
string = r'"(\\"|[^"])*"'
char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
- number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)'
+ number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)'
register = '%' + identifier + r'\b'
tokens = {
@@ -53,11 +53,11 @@ class GasLexer(RegexLexer):
(string, String),
('@' + identifier, Name.Attribute),
(number, Number.Integer),
- (register, Name.Variable),
+ (register, Name.Variable),
(r'[\r\n]+', Whitespace, '#pop'),
- (r'([;#]|//).*?\n', Comment.Single, '#pop'),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
+ (r'([;#]|//).*?\n', Comment.Single, '#pop'),
+ (r'/[*].*?[*]/', Comment.Multiline),
+ (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
include('punctuation'),
include('whitespace')
@@ -76,14 +76,14 @@ class GasLexer(RegexLexer):
(identifier, Name.Constant),
(number, Number.Integer),
# Registers
- (register, Name.Variable),
+ (register, Name.Variable),
# Numeric constants
('$'+number, Number.Integer),
(r"$'(.|\\')'", String.Char),
(r'[\r\n]+', Whitespace, '#pop'),
- (r'([;#]|//).*?\n', Comment.Single, '#pop'),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
+ (r'([;#]|//).*?\n', Comment.Single, '#pop'),
+ (r'/[*].*?[*]/', Comment.Multiline),
+ (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
include('punctuation'),
include('whitespace')
@@ -91,8 +91,8 @@ class GasLexer(RegexLexer):
'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- (r'([;#]|//).*?\n', Comment.Single),
- (r'/[*][\w\W]*?[*]/', Comment.Multiline)
+ (r'([;#]|//).*?\n', Comment.Single),
+ (r'/[*][\w\W]*?[*]/', Comment.Multiline)
],
'punctuation': [
(r'[-*,.()\[\]!:]+', Punctuation)
@@ -100,9 +100,9 @@ class GasLexer(RegexLexer):
}
def analyse_text(text):
- if re.search(r'^\.(text|data|section)', text, re.M):
+ if re.search(r'^\.(text|data|section)', text, re.M):
return True
- elif re.search(r'^\.\w+', text, re.M):
+ elif re.search(r'^\.\w+', text, re.M):
return 0.1
@@ -161,7 +161,7 @@ def _objdump_lexer_tokens(asm_lexer):
class ObjdumpLexer(RegexLexer):
"""
- For the output of ``objdump -dr``.
+ For the output of ``objdump -dr``.
"""
name = 'objdump'
aliases = ['objdump']
@@ -173,7 +173,7 @@ class ObjdumpLexer(RegexLexer):
class DObjdumpLexer(DelegatingLexer):
"""
- For the output of ``objdump -Sr`` on compiled D files.
+ For the output of ``objdump -Sr`` on compiled D files.
"""
name = 'd-objdump'
aliases = ['d-objdump']
@@ -181,12 +181,12 @@ class DObjdumpLexer(DelegatingLexer):
mimetypes = ['text/x-d-objdump']
def __init__(self, **options):
- super().__init__(DLexer, ObjdumpLexer, **options)
+ super().__init__(DLexer, ObjdumpLexer, **options)
class CppObjdumpLexer(DelegatingLexer):
"""
- For the output of ``objdump -Sr`` on compiled C++ files.
+ For the output of ``objdump -Sr`` on compiled C++ files.
"""
name = 'cpp-objdump'
aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
@@ -194,12 +194,12 @@ class CppObjdumpLexer(DelegatingLexer):
mimetypes = ['text/x-cpp-objdump']
def __init__(self, **options):
- super().__init__(CppLexer, ObjdumpLexer, **options)
+ super().__init__(CppLexer, ObjdumpLexer, **options)
class CObjdumpLexer(DelegatingLexer):
"""
- For the output of ``objdump -Sr`` on compiled C files.
+ For the output of ``objdump -Sr`` on compiled C files.
"""
name = 'c-objdump'
aliases = ['c-objdump']
@@ -207,7 +207,7 @@ class CObjdumpLexer(DelegatingLexer):
mimetypes = ['text/x-c-objdump']
def __init__(self, **options):
- super().__init__(CLexer, ObjdumpLexer, **options)
+ super().__init__(CLexer, ObjdumpLexer, **options)
class HsailLexer(RegexLexer):
@@ -357,14 +357,14 @@ class LlvmLexer(RegexLexer):
#: optional Comment or Whitespace
string = r'"[^"]*?"'
identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
- block_label = r'(' + identifier + r'|(\d+))'
+ block_label = r'(' + identifier + r'|(\d+))'
tokens = {
'root': [
include('whitespace'),
# Before keywords, because keywords are valid label names :(...
- (block_label + r'\s*:', Name.Label),
+ (block_label + r'\s*:', Name.Label),
include('keyword'),
@@ -389,325 +389,325 @@ class LlvmLexer(RegexLexer):
'keyword': [
# Regular keywords
(words((
- 'aarch64_sve_vector_pcs', 'aarch64_vector_pcs', 'acq_rel',
- 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
- 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca',
- 'allocsize', 'allOnes', 'alwaysinline', 'alwaysInline',
- 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gfx', 'amdgpu_gs',
- 'amdgpu_hs', 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps',
- 'amdgpu_vs', 'and', 'any', 'anyregcc', 'appending', 'arcp',
- 'argmemonly', 'args', 'arm_aapcs_vfpcc', 'arm_aapcscc',
- 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw',
- 'attributes', 'available_externally', 'avr_intrcc',
- 'avr_signalcc', 'bit', 'bitcast', 'bitMask', 'blockaddress',
- 'blockcount', 'br', 'branchFunnel', 'builtin', 'byArg',
- 'byref', 'byte', 'byteArray', 'byval', 'c', 'call', 'callbr',
- 'callee', 'caller', 'calls', 'canAutoHide', 'catch',
- 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc',
- 'cfguard_checkcc', 'cleanup', 'cleanuppad', 'cleanupret',
- 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
- 'contract', 'convergent', 'critical', 'cxx_fast_tlscc',
- 'datalayout', 'declare', 'default', 'define', 'deplibs',
- 'dereferenceable', 'dereferenceable_or_null', 'distinct',
- 'dllexport', 'dllimport', 'dso_local', 'dso_local_equivalent',
- 'dso_preemptable', 'dsoLocal', 'eq', 'exact', 'exactmatch',
- 'extern_weak', 'external', 'externally_initialized',
- 'extractelement', 'extractvalue', 'fadd', 'false', 'fast',
- 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul',
- 'fneg', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze',
- 'frem', 'from', 'fsub', 'funcFlags', 'function', 'gc',
- 'getelementptr', 'ghccc', 'global', 'guid', 'gv', 'hash',
- 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
- 'ifunc', 'inaccessiblemem_or_argmemonly',
- 'inaccessiblememonly', 'inalloca', 'inbounds', 'indir',
- 'indirectbr', 'info', 'initialexec', 'inline', 'inlineBits',
- 'inlinehint', 'inrange', 'inreg', 'insertelement',
- 'insertvalue', 'insts', 'intel_ocl_bicc', 'inteldialect',
- 'internal', 'inttoptr', 'invoke', 'jumptable', 'kind',
- 'landingpad', 'largest', 'linkage', 'linkonce', 'linkonce_odr',
- 'live', 'load', 'local_unnamed_addr', 'localdynamic',
- 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize',
- 'module', 'monotonic', 'msp430_intrcc', 'mul', 'mustprogress',
- 'musttail', 'naked', 'name', 'nand', 'ne', 'nest', 'ninf',
- 'nnan', 'noalias', 'nobuiltin', 'nocallback', 'nocapture',
- 'nocf_check', 'noduplicate', 'noduplicates', 'nofree',
- 'noimplicitfloat', 'noinline', 'noInline', 'nomerge', 'none',
- 'nonlazybind', 'nonnull', 'noprofile', 'norecurse',
- 'noRecurse', 'noredzone', 'noreturn', 'nosync', 'notail',
- 'notEligibleToImport', 'noundef', 'nounwind', 'nsw',
- 'nsz', 'null', 'null_pointer_is_valid', 'nuw', 'oeq', 'offset',
- 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
- 'optnone', 'optsize', 'or', 'ord', 'param', 'params',
- 'partition', 'path', 'personality', 'phi', 'poison',
- 'preallocated', 'prefix', 'preserve_allcc', 'preserve_mostcc',
- 'private', 'prologue', 'protected', 'ptrtoint', 'ptx_device',
- 'ptx_kernel', 'readnone', 'readNone', 'readonly', 'readOnly',
- 'reassoc', 'refs', 'relbf', 'release', 'resByArg', 'resume',
- 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice',
- 'safestack', 'samesize', 'sanitize_address',
- 'sanitize_hwaddress', 'sanitize_memory', 'sanitize_memtag',
- 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst',
- 'sext', 'sge', 'sgt', 'shadowcallstack', 'shl',
- 'shufflevector', 'sideeffect', 'signext', 'single',
- 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1',
- 'sizeM1BitWidth', 'sle', 'slt', 'source_filename',
- 'speculatable', 'speculative_load_hardening', 'spir_func',
- 'spir_kernel', 'srem', 'sret', 'ssp', 'sspreq', 'sspstrong',
- 'store', 'strictfp', 'sub', 'summaries', 'summary', 'swiftcc',
- 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
- 'tailcc', 'target', 'thread_local', 'to', 'token', 'triple',
- 'true', 'trunc', 'type', 'typeCheckedLoadConstVCalls',
- 'typeCheckedLoadVCalls', 'typeid', 'typeidCompatibleVTable',
- 'typeIdInfo', 'typeTestAssumeConstVCalls',
- 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests', 'udiv',
- 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin',
- 'undef', 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown',
- 'unnamed_addr', 'uno', 'unordered', 'unreachable', 'unsat',
- 'unwind', 'urem', 'uselistorder', 'uselistorder_bb', 'uwtable',
- 'va_arg', 'varFlags', 'variable', 'vcall_visibility',
- 'vFuncId', 'virtFunc', 'virtualConstProp', 'void', 'volatile',
- 'vscale', 'vTableFuncs', 'weak', 'weak_odr', 'webkit_jscc',
- 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly',
- 'x', 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc',
- 'x86_mmx', 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc',
- 'x86_vectorcallcc', 'xchg', 'xor', 'zeroext',
- 'zeroinitializer', 'zext', 'immarg', 'willreturn'),
+ 'aarch64_sve_vector_pcs', 'aarch64_vector_pcs', 'acq_rel',
+ 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
+ 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca',
+ 'allocsize', 'allOnes', 'alwaysinline', 'alwaysInline',
+ 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gfx', 'amdgpu_gs',
+ 'amdgpu_hs', 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps',
+ 'amdgpu_vs', 'and', 'any', 'anyregcc', 'appending', 'arcp',
+ 'argmemonly', 'args', 'arm_aapcs_vfpcc', 'arm_aapcscc',
+ 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw',
+ 'attributes', 'available_externally', 'avr_intrcc',
+ 'avr_signalcc', 'bit', 'bitcast', 'bitMask', 'blockaddress',
+ 'blockcount', 'br', 'branchFunnel', 'builtin', 'byArg',
+ 'byref', 'byte', 'byteArray', 'byval', 'c', 'call', 'callbr',
+ 'callee', 'caller', 'calls', 'canAutoHide', 'catch',
+ 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc',
+ 'cfguard_checkcc', 'cleanup', 'cleanuppad', 'cleanupret',
+ 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
+ 'contract', 'convergent', 'critical', 'cxx_fast_tlscc',
+ 'datalayout', 'declare', 'default', 'define', 'deplibs',
+ 'dereferenceable', 'dereferenceable_or_null', 'distinct',
+ 'dllexport', 'dllimport', 'dso_local', 'dso_local_equivalent',
+ 'dso_preemptable', 'dsoLocal', 'eq', 'exact', 'exactmatch',
+ 'extern_weak', 'external', 'externally_initialized',
+ 'extractelement', 'extractvalue', 'fadd', 'false', 'fast',
+ 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul',
+ 'fneg', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze',
+ 'frem', 'from', 'fsub', 'funcFlags', 'function', 'gc',
+ 'getelementptr', 'ghccc', 'global', 'guid', 'gv', 'hash',
+ 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
+ 'ifunc', 'inaccessiblemem_or_argmemonly',
+ 'inaccessiblememonly', 'inalloca', 'inbounds', 'indir',
+ 'indirectbr', 'info', 'initialexec', 'inline', 'inlineBits',
+ 'inlinehint', 'inrange', 'inreg', 'insertelement',
+ 'insertvalue', 'insts', 'intel_ocl_bicc', 'inteldialect',
+ 'internal', 'inttoptr', 'invoke', 'jumptable', 'kind',
+ 'landingpad', 'largest', 'linkage', 'linkonce', 'linkonce_odr',
+ 'live', 'load', 'local_unnamed_addr', 'localdynamic',
+ 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize',
+ 'module', 'monotonic', 'msp430_intrcc', 'mul', 'mustprogress',
+ 'musttail', 'naked', 'name', 'nand', 'ne', 'nest', 'ninf',
+ 'nnan', 'noalias', 'nobuiltin', 'nocallback', 'nocapture',
+ 'nocf_check', 'noduplicate', 'noduplicates', 'nofree',
+ 'noimplicitfloat', 'noinline', 'noInline', 'nomerge', 'none',
+ 'nonlazybind', 'nonnull', 'noprofile', 'norecurse',
+ 'noRecurse', 'noredzone', 'noreturn', 'nosync', 'notail',
+ 'notEligibleToImport', 'noundef', 'nounwind', 'nsw',
+ 'nsz', 'null', 'null_pointer_is_valid', 'nuw', 'oeq', 'offset',
+ 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
+ 'optnone', 'optsize', 'or', 'ord', 'param', 'params',
+ 'partition', 'path', 'personality', 'phi', 'poison',
+ 'preallocated', 'prefix', 'preserve_allcc', 'preserve_mostcc',
+ 'private', 'prologue', 'protected', 'ptrtoint', 'ptx_device',
+ 'ptx_kernel', 'readnone', 'readNone', 'readonly', 'readOnly',
+ 'reassoc', 'refs', 'relbf', 'release', 'resByArg', 'resume',
+ 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice',
+ 'safestack', 'samesize', 'sanitize_address',
+ 'sanitize_hwaddress', 'sanitize_memory', 'sanitize_memtag',
+ 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst',
+ 'sext', 'sge', 'sgt', 'shadowcallstack', 'shl',
+ 'shufflevector', 'sideeffect', 'signext', 'single',
+ 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1',
+ 'sizeM1BitWidth', 'sle', 'slt', 'source_filename',
+ 'speculatable', 'speculative_load_hardening', 'spir_func',
+ 'spir_kernel', 'srem', 'sret', 'ssp', 'sspreq', 'sspstrong',
+ 'store', 'strictfp', 'sub', 'summaries', 'summary', 'swiftcc',
+ 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
+ 'tailcc', 'target', 'thread_local', 'to', 'token', 'triple',
+ 'true', 'trunc', 'type', 'typeCheckedLoadConstVCalls',
+ 'typeCheckedLoadVCalls', 'typeid', 'typeidCompatibleVTable',
+ 'typeIdInfo', 'typeTestAssumeConstVCalls',
+ 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests', 'udiv',
+ 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin',
+ 'undef', 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown',
+ 'unnamed_addr', 'uno', 'unordered', 'unreachable', 'unsat',
+ 'unwind', 'urem', 'uselistorder', 'uselistorder_bb', 'uwtable',
+ 'va_arg', 'varFlags', 'variable', 'vcall_visibility',
+ 'vFuncId', 'virtFunc', 'virtualConstProp', 'void', 'volatile',
+ 'vscale', 'vTableFuncs', 'weak', 'weak_odr', 'webkit_jscc',
+ 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly',
+ 'x', 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc',
+ 'x86_mmx', 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc',
+ 'x86_vectorcallcc', 'xchg', 'xor', 'zeroext',
+ 'zeroinitializer', 'zext', 'immarg', 'willreturn'),
suffix=r'\b'), Keyword),
# Types
- (words(('void', 'half', 'bfloat', 'float', 'double', 'fp128',
- 'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'x86_mmx',
- 'x86_amx', 'token')),
- Keyword.Type),
+ (words(('void', 'half', 'bfloat', 'float', 'double', 'fp128',
+ 'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'x86_mmx',
+ 'x86_amx', 'token')),
+ Keyword.Type),
# Integer types
- (r'i[1-9]\d*', Keyword.Type)
+ (r'i[1-9]\d*', Keyword.Type)
]
}
-class LlvmMirBodyLexer(RegexLexer):
- """
- For LLVM MIR examples without the YAML wrapper.
-
- For more information on LLVM MIR see https://llvm.org/docs/MIRLangRef.html.
-
- .. versionadded:: 2.6
- """
- name = 'LLVM-MIR Body'
- aliases = ['llvm-mir-body']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- # Attributes on basic blocks
- (words(('liveins', 'successors'), suffix=':'), Keyword),
- # Basic Block Labels
- (r'bb\.[0-9]+(\.[a-zA-Z0-9_.-]+)?( \(address-taken\))?:', Name.Label),
- (r'bb\.[0-9]+ \(%[a-zA-Z0-9_.-]+\)( \(address-taken\))?:', Name.Label),
- (r'%bb\.[0-9]+(\.\w+)?', Name.Label),
- # Stack references
- (r'%stack\.[0-9]+(\.\w+\.addr)?', Name),
- # Subreg indices
- (r'%subreg\.\w+', Name),
- # Virtual registers
- (r'%[a-zA-Z0-9_]+ *', Name.Variable, 'vreg'),
- # Reference to LLVM-IR global
- include('global'),
- # Reference to Intrinsic
- (r'intrinsic\(\@[a-zA-Z0-9_.]+\)', Name.Variable.Global),
- # Comparison predicates
- (words(('eq', 'ne', 'sgt', 'sge', 'slt', 'sle', 'ugt', 'uge', 'ult',
- 'ule'), prefix=r'intpred\(', suffix=r'\)'), Name.Builtin),
- (words(('oeq', 'one', 'ogt', 'oge', 'olt', 'ole', 'ugt', 'uge',
- 'ult', 'ule'), prefix=r'floatpred\(', suffix=r'\)'),
- Name.Builtin),
- # Physical registers
- (r'\$\w+', String.Single),
- # Assignment operator
- (r'=', Operator),
- # gMIR Opcodes
- (r'(G_ANYEXT|G_[SZ]EXT|G_SEXT_INREG|G_TRUNC|G_IMPLICIT_DEF|G_PHI|'
- r'G_FRAME_INDEX|G_GLOBAL_VALUE|G_INTTOPTR|G_PTRTOINT|G_BITCAST|'
- r'G_CONSTANT|G_FCONSTANT|G_VASTART|G_VAARG|G_CTLZ|G_CTLZ_ZERO_UNDEF|'
- r'G_CTTZ|G_CTTZ_ZERO_UNDEF|G_CTPOP|G_BSWAP|G_BITREVERSE|'
- r'G_ADDRSPACE_CAST|G_BLOCK_ADDR|G_JUMP_TABLE|G_DYN_STACKALLOC|'
- r'G_ADD|G_SUB|G_MUL|G_[SU]DIV|G_[SU]REM|G_AND|G_OR|G_XOR|G_SHL|'
- r'G_[LA]SHR|G_[IF]CMP|G_SELECT|G_GEP|G_PTR_MASK|G_SMIN|G_SMAX|'
- r'G_UMIN|G_UMAX|G_[US]ADDO|G_[US]ADDE|G_[US]SUBO|G_[US]SUBE|'
- r'G_[US]MULO|G_[US]MULH|G_FNEG|G_FPEXT|G_FPTRUNC|G_FPTO[US]I|'
- r'G_[US]ITOFP|G_FABS|G_FCOPYSIGN|G_FCANONICALIZE|G_FMINNUM|'
- r'G_FMAXNUM|G_FMINNUM_IEEE|G_FMAXNUM_IEEE|G_FMINIMUM|G_FMAXIMUM|'
- r'G_FADD|G_FSUB|G_FMUL|G_FMA|G_FMAD|G_FDIV|G_FREM|G_FPOW|G_FEXP|'
- r'G_FEXP2|G_FLOG|G_FLOG2|G_FLOG10|G_FCEIL|G_FCOS|G_FSIN|G_FSQRT|'
- r'G_FFLOOR|G_FRINT|G_FNEARBYINT|G_INTRINSIC_TRUNC|'
- r'G_INTRINSIC_ROUND|G_LOAD|G_[ZS]EXTLOAD|G_INDEXED_LOAD|'
- r'G_INDEXED_[ZS]EXTLOAD|G_STORE|G_INDEXED_STORE|'
- r'G_ATOMIC_CMPXCHG_WITH_SUCCESS|G_ATOMIC_CMPXCHG|'
- r'G_ATOMICRMW_(XCHG|ADD|SUB|AND|NAND|OR|XOR|MAX|MIN|UMAX|UMIN|FADD|'
- r'FSUB)'
- r'|G_FENCE|G_EXTRACT|G_UNMERGE_VALUES|G_INSERT|G_MERGE_VALUES|'
- r'G_BUILD_VECTOR|G_BUILD_VECTOR_TRUNC|G_CONCAT_VECTORS|'
- r'G_INTRINSIC|G_INTRINSIC_W_SIDE_EFFECTS|G_BR|G_BRCOND|'
- r'G_BRINDIRECT|G_BRJT|G_INSERT_VECTOR_ELT|G_EXTRACT_VECTOR_ELT|'
- r'G_SHUFFLE_VECTOR)\b',
- Name.Builtin),
- # Target independent opcodes
- (r'(COPY|PHI|INSERT_SUBREG|EXTRACT_SUBREG|REG_SEQUENCE)\b',
- Name.Builtin),
- # Flags
- (words(('killed', 'implicit')), Keyword),
- # ConstantInt values
+class LlvmMirBodyLexer(RegexLexer):
+ """
+ For LLVM MIR examples without the YAML wrapper.
+
+ For more information on LLVM MIR see https://llvm.org/docs/MIRLangRef.html.
+
+ .. versionadded:: 2.6
+ """
+ name = 'LLVM-MIR Body'
+ aliases = ['llvm-mir-body']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # Attributes on basic blocks
+ (words(('liveins', 'successors'), suffix=':'), Keyword),
+ # Basic Block Labels
+ (r'bb\.[0-9]+(\.[a-zA-Z0-9_.-]+)?( \(address-taken\))?:', Name.Label),
+ (r'bb\.[0-9]+ \(%[a-zA-Z0-9_.-]+\)( \(address-taken\))?:', Name.Label),
+ (r'%bb\.[0-9]+(\.\w+)?', Name.Label),
+ # Stack references
+ (r'%stack\.[0-9]+(\.\w+\.addr)?', Name),
+ # Subreg indices
+ (r'%subreg\.\w+', Name),
+ # Virtual registers
+ (r'%[a-zA-Z0-9_]+ *', Name.Variable, 'vreg'),
+ # Reference to LLVM-IR global
+ include('global'),
+ # Reference to Intrinsic
+ (r'intrinsic\(\@[a-zA-Z0-9_.]+\)', Name.Variable.Global),
+ # Comparison predicates
+ (words(('eq', 'ne', 'sgt', 'sge', 'slt', 'sle', 'ugt', 'uge', 'ult',
+ 'ule'), prefix=r'intpred\(', suffix=r'\)'), Name.Builtin),
+ (words(('oeq', 'one', 'ogt', 'oge', 'olt', 'ole', 'ugt', 'uge',
+ 'ult', 'ule'), prefix=r'floatpred\(', suffix=r'\)'),
+ Name.Builtin),
+ # Physical registers
+ (r'\$\w+', String.Single),
+ # Assignment operator
+ (r'=', Operator),
+ # gMIR Opcodes
+ (r'(G_ANYEXT|G_[SZ]EXT|G_SEXT_INREG|G_TRUNC|G_IMPLICIT_DEF|G_PHI|'
+ r'G_FRAME_INDEX|G_GLOBAL_VALUE|G_INTTOPTR|G_PTRTOINT|G_BITCAST|'
+ r'G_CONSTANT|G_FCONSTANT|G_VASTART|G_VAARG|G_CTLZ|G_CTLZ_ZERO_UNDEF|'
+ r'G_CTTZ|G_CTTZ_ZERO_UNDEF|G_CTPOP|G_BSWAP|G_BITREVERSE|'
+ r'G_ADDRSPACE_CAST|G_BLOCK_ADDR|G_JUMP_TABLE|G_DYN_STACKALLOC|'
+ r'G_ADD|G_SUB|G_MUL|G_[SU]DIV|G_[SU]REM|G_AND|G_OR|G_XOR|G_SHL|'
+ r'G_[LA]SHR|G_[IF]CMP|G_SELECT|G_GEP|G_PTR_MASK|G_SMIN|G_SMAX|'
+ r'G_UMIN|G_UMAX|G_[US]ADDO|G_[US]ADDE|G_[US]SUBO|G_[US]SUBE|'
+ r'G_[US]MULO|G_[US]MULH|G_FNEG|G_FPEXT|G_FPTRUNC|G_FPTO[US]I|'
+ r'G_[US]ITOFP|G_FABS|G_FCOPYSIGN|G_FCANONICALIZE|G_FMINNUM|'
+ r'G_FMAXNUM|G_FMINNUM_IEEE|G_FMAXNUM_IEEE|G_FMINIMUM|G_FMAXIMUM|'
+ r'G_FADD|G_FSUB|G_FMUL|G_FMA|G_FMAD|G_FDIV|G_FREM|G_FPOW|G_FEXP|'
+ r'G_FEXP2|G_FLOG|G_FLOG2|G_FLOG10|G_FCEIL|G_FCOS|G_FSIN|G_FSQRT|'
+ r'G_FFLOOR|G_FRINT|G_FNEARBYINT|G_INTRINSIC_TRUNC|'
+ r'G_INTRINSIC_ROUND|G_LOAD|G_[ZS]EXTLOAD|G_INDEXED_LOAD|'
+ r'G_INDEXED_[ZS]EXTLOAD|G_STORE|G_INDEXED_STORE|'
+ r'G_ATOMIC_CMPXCHG_WITH_SUCCESS|G_ATOMIC_CMPXCHG|'
+ r'G_ATOMICRMW_(XCHG|ADD|SUB|AND|NAND|OR|XOR|MAX|MIN|UMAX|UMIN|FADD|'
+ r'FSUB)'
+ r'|G_FENCE|G_EXTRACT|G_UNMERGE_VALUES|G_INSERT|G_MERGE_VALUES|'
+ r'G_BUILD_VECTOR|G_BUILD_VECTOR_TRUNC|G_CONCAT_VECTORS|'
+ r'G_INTRINSIC|G_INTRINSIC_W_SIDE_EFFECTS|G_BR|G_BRCOND|'
+ r'G_BRINDIRECT|G_BRJT|G_INSERT_VECTOR_ELT|G_EXTRACT_VECTOR_ELT|'
+ r'G_SHUFFLE_VECTOR)\b',
+ Name.Builtin),
+ # Target independent opcodes
+ (r'(COPY|PHI|INSERT_SUBREG|EXTRACT_SUBREG|REG_SEQUENCE)\b',
+ Name.Builtin),
+ # Flags
+ (words(('killed', 'implicit')), Keyword),
+ # ConstantInt values
(r'(i[0-9]+)( +)', bygroups(Keyword.Type, Whitespace), 'constantint'),
- # ConstantFloat values
- (r'(half|float|double) +', Keyword.Type, 'constantfloat'),
- # Bare immediates
- include('integer'),
- # MMO's
+ # ConstantFloat values
+ (r'(half|float|double) +', Keyword.Type, 'constantfloat'),
+ # Bare immediates
+ include('integer'),
+ # MMO's
(r'(::)( *)', bygroups(Operator, Whitespace), 'mmo'),
- # MIR Comments
- (r';.*', Comment),
- # If we get here, assume it's a target instruction
- (r'[a-zA-Z0-9_]+', Name),
- # Everything else that isn't highlighted
- (r'[(), \n]+', Text),
- ],
- # The integer constant from a ConstantInt value
- 'constantint': [
- include('integer'),
- (r'(?=.)', Text, '#pop'),
- ],
- # The floating point constant from a ConstantFloat value
- 'constantfloat': [
- include('float'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'vreg': [
- # The bank or class if there is one
+ # MIR Comments
+ (r';.*', Comment),
+ # If we get here, assume it's a target instruction
+ (r'[a-zA-Z0-9_]+', Name),
+ # Everything else that isn't highlighted
+ (r'[(), \n]+', Text),
+ ],
+ # The integer constant from a ConstantInt value
+ 'constantint': [
+ include('integer'),
+ (r'(?=.)', Text, '#pop'),
+ ],
+ # The floating point constant from a ConstantFloat value
+ 'constantfloat': [
+ include('float'),
+ (r'(?=.)', Text, '#pop'),
+ ],
+ 'vreg': [
+ # The bank or class if there is one
(r'( *)(:(?!:))', bygroups(Whitespace, Keyword), ('#pop', 'vreg_bank_or_class')),
- # The LLT if there is one
+ # The LLT if there is one
(r'( *)(\()', bygroups(Whitespace, Text), 'vreg_type'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'vreg_bank_or_class': [
- # The unassigned bank/class
+ (r'(?=.)', Text, '#pop'),
+ ],
+ 'vreg_bank_or_class': [
+ # The unassigned bank/class
(r'( *)(_)', bygroups(Whitespace, Name.Variable.Magic)),
(r'( *)([a-zA-Z0-9_]+)', bygroups(Whitespace, Name.Variable)),
- # The LLT if there is one
+ # The LLT if there is one
(r'( *)(\()', bygroups(Whitespace, Text), 'vreg_type'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'vreg_type': [
- # Scalar and pointer types
+ (r'(?=.)', Text, '#pop'),
+ ],
+ 'vreg_type': [
+ # Scalar and pointer types
(r'( *)([sp][0-9]+)', bygroups(Whitespace, Keyword.Type)),
(r'( *)(<[0-9]+ *x *[sp][0-9]+>)', bygroups(Whitespace, Keyword.Type)),
- (r'\)', Text, '#pop'),
- (r'(?=.)', Text, '#pop'),
- ],
- 'mmo': [
- (r'\(', Text),
+ (r'\)', Text, '#pop'),
+ (r'(?=.)', Text, '#pop'),
+ ],
+ 'mmo': [
+ (r'\(', Text),
(r' +', Whitespace),
- (words(('load', 'store', 'on', 'into', 'from', 'align', 'monotonic',
- 'acquire', 'release', 'acq_rel', 'seq_cst')),
- Keyword),
- # IR references
- (r'%ir\.[a-zA-Z0-9_.-]+', Name),
- (r'%ir-block\.[a-zA-Z0-9_.-]+', Name),
- (r'[-+]', Operator),
- include('integer'),
- include('global'),
- (r',', Punctuation),
- (r'\), \(', Text),
- (r'\)', Text, '#pop'),
- ],
- 'integer': [(r'-?[0-9]+', Number.Integer),],
- 'float': [(r'-?[0-9]+\.[0-9]+(e[+-][0-9]+)?', Number.Float)],
- 'global': [(r'\@[a-zA-Z0-9_.]+', Name.Variable.Global)],
- }
-
-
-class LlvmMirLexer(RegexLexer):
- """
- Lexer for the overall LLVM MIR document format.
-
- MIR is a human readable serialization format that's used to represent LLVM's
- machine specific intermediate representation. It allows LLVM's developers to
- see the state of the compilation process at various points, as well as test
- individual pieces of the compiler.
-
- For more information on LLVM MIR see https://llvm.org/docs/MIRLangRef.html.
-
- .. versionadded:: 2.6
- """
- name = 'LLVM-MIR'
- aliases = ['llvm-mir']
- filenames = ['*.mir']
-
- tokens = {
- 'root': [
- # Comments are hashes at the YAML level
- (r'#.*', Comment),
- # Documents starting with | are LLVM-IR
- (r'--- \|$', Keyword, 'llvm_ir'),
- # Other documents are MIR
- (r'---', Keyword, 'llvm_mir'),
- # Consume everything else in one token for efficiency
- (r'[^-#]+|.', Text),
- ],
- 'llvm_ir': [
- # Documents end with '...' or '---'
- (r'(\.\.\.|(?=---))', Keyword, '#pop'),
- # Delegate to the LlvmLexer
- (r'((?:.|\n)+?)(?=(\.\.\.|---))', bygroups(using(LlvmLexer))),
- ],
- 'llvm_mir': [
- # Comments are hashes at the YAML level
- (r'#.*', Comment),
- # Documents end with '...' or '---'
- (r'(\.\.\.|(?=---))', Keyword, '#pop'),
- # Handle the simple attributes
- (r'name:', Keyword, 'name'),
- (words(('alignment', ),
- suffix=':'), Keyword, 'number'),
- (words(('legalized', 'regBankSelected', 'tracksRegLiveness',
- 'selected', 'exposesReturnsTwice'),
- suffix=':'), Keyword, 'boolean'),
-            # Handle the attributes we don't highlight inside
- (words(('registers', 'stack', 'fixedStack', 'liveins', 'frameInfo',
- 'machineFunctionInfo'),
- suffix=':'), Keyword),
- # Delegate the body block to the LlvmMirBodyLexer
- (r'body: *\|', Keyword, 'llvm_mir_body'),
- # Consume everything else
- (r'.+', Text),
+ (words(('load', 'store', 'on', 'into', 'from', 'align', 'monotonic',
+ 'acquire', 'release', 'acq_rel', 'seq_cst')),
+ Keyword),
+ # IR references
+ (r'%ir\.[a-zA-Z0-9_.-]+', Name),
+ (r'%ir-block\.[a-zA-Z0-9_.-]+', Name),
+ (r'[-+]', Operator),
+ include('integer'),
+ include('global'),
+ (r',', Punctuation),
+ (r'\), \(', Text),
+ (r'\)', Text, '#pop'),
+ ],
+ 'integer': [(r'-?[0-9]+', Number.Integer),],
+ 'float': [(r'-?[0-9]+\.[0-9]+(e[+-][0-9]+)?', Number.Float)],
+ 'global': [(r'\@[a-zA-Z0-9_.]+', Name.Variable.Global)],
+ }
+
+
+class LlvmMirLexer(RegexLexer):
+ """
+ Lexer for the overall LLVM MIR document format.
+
+ MIR is a human readable serialization format that's used to represent LLVM's
+ machine specific intermediate representation. It allows LLVM's developers to
+ see the state of the compilation process at various points, as well as test
+ individual pieces of the compiler.
+
+ For more information on LLVM MIR see https://llvm.org/docs/MIRLangRef.html.
+
+ .. versionadded:: 2.6
+ """
+ name = 'LLVM-MIR'
+ aliases = ['llvm-mir']
+ filenames = ['*.mir']
+
+ tokens = {
+ 'root': [
+ # Comments are hashes at the YAML level
+ (r'#.*', Comment),
+ # Documents starting with | are LLVM-IR
+ (r'--- \|$', Keyword, 'llvm_ir'),
+ # Other documents are MIR
+ (r'---', Keyword, 'llvm_mir'),
+ # Consume everything else in one token for efficiency
+ (r'[^-#]+|.', Text),
+ ],
+ 'llvm_ir': [
+ # Documents end with '...' or '---'
+ (r'(\.\.\.|(?=---))', Keyword, '#pop'),
+ # Delegate to the LlvmLexer
+ (r'((?:.|\n)+?)(?=(\.\.\.|---))', bygroups(using(LlvmLexer))),
+ ],
+ 'llvm_mir': [
+ # Comments are hashes at the YAML level
+ (r'#.*', Comment),
+ # Documents end with '...' or '---'
+ (r'(\.\.\.|(?=---))', Keyword, '#pop'),
+ # Handle the simple attributes
+ (r'name:', Keyword, 'name'),
+ (words(('alignment', ),
+ suffix=':'), Keyword, 'number'),
+ (words(('legalized', 'regBankSelected', 'tracksRegLiveness',
+ 'selected', 'exposesReturnsTwice'),
+ suffix=':'), Keyword, 'boolean'),
+            # Handle the attributes we don't highlight inside
+ (words(('registers', 'stack', 'fixedStack', 'liveins', 'frameInfo',
+ 'machineFunctionInfo'),
+ suffix=':'), Keyword),
+ # Delegate the body block to the LlvmMirBodyLexer
+ (r'body: *\|', Keyword, 'llvm_mir_body'),
+ # Consume everything else
+ (r'.+', Text),
(r'\n', Whitespace),
- ],
- 'name': [
- (r'[^\n]+', Name),
- default('#pop'),
- ],
- 'boolean': [
- (r' *(true|false)', Name.Builtin),
- default('#pop'),
- ],
- 'number': [
- (r' *[0-9]+', Number),
- default('#pop'),
- ],
- 'llvm_mir_body': [
- # Documents end with '...' or '---'.
- # We have to pop llvm_mir_body and llvm_mir
- (r'(\.\.\.|(?=---))', Keyword, '#pop:2'),
- # Delegate the body block to the LlvmMirBodyLexer
- (r'((?:.|\n)+?)(?=\.\.\.|---)', bygroups(using(LlvmMirBodyLexer))),
- # The '...' is optional. If we didn't already find it then it isn't
- # there. There might be a '---' instead though.
- (r'(?!\.\.\.|---)((?:.|\n)+)', bygroups(using(LlvmMirBodyLexer))),
- ],
- }
-
-
+ ],
+ 'name': [
+ (r'[^\n]+', Name),
+ default('#pop'),
+ ],
+ 'boolean': [
+ (r' *(true|false)', Name.Builtin),
+ default('#pop'),
+ ],
+ 'number': [
+ (r' *[0-9]+', Number),
+ default('#pop'),
+ ],
+ 'llvm_mir_body': [
+ # Documents end with '...' or '---'.
+ # We have to pop llvm_mir_body and llvm_mir
+ (r'(\.\.\.|(?=---))', Keyword, '#pop:2'),
+ # Delegate the body block to the LlvmMirBodyLexer
+ (r'((?:.|\n)+?)(?=\.\.\.|---)', bygroups(using(LlvmMirBodyLexer))),
+ # The '...' is optional. If we didn't already find it then it isn't
+ # there. There might be a '---' instead though.
+ (r'(?!\.\.\.|---)((?:.|\n)+)', bygroups(using(LlvmMirBodyLexer))),
+ ],
+ }
+
+
class NasmLexer(RegexLexer):
"""
For Nasm (Intel) assembly code.
@@ -717,10 +717,10 @@ class NasmLexer(RegexLexer):
filenames = ['*.asm', '*.ASM']
mimetypes = ['text/x-nasm']
- # Tasm uses the same file endings, but TASM is not as common as NASM, so
- # we prioritize NASM higher by default
- priority = 1.0
-
+ # Tasm uses the same file endings, but TASM is not as common as NASM, so
+ # we prioritize NASM higher by default
+ priority = 1.0
+
identifier = r'[a-z$._?][\w$.?#@~]*'
hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
octn = r'[0-7]+q'
@@ -787,15 +787,15 @@ class NasmLexer(RegexLexer):
],
}
- def analyse_text(text):
- # Probably TASM
- if re.match(r'PROC', text, re.IGNORECASE):
- return False
-
+ def analyse_text(text):
+ # Probably TASM
+ if re.match(r'PROC', text, re.IGNORECASE):
+ return False
+
class NasmObjdumpLexer(ObjdumpLexer):
"""
- For the output of ``objdump -d -M intel``.
+ For the output of ``objdump -d -M intel``.
.. versionadded:: 2.0
"""
@@ -886,12 +886,12 @@ class TasmLexer(RegexLexer):
],
}
- def analyse_text(text):
- # See above
- if re.match(r'PROC', text, re.I):
- return True
-
+ def analyse_text(text):
+ # See above
+ if re.match(r'PROC', text, re.I):
+ return True
+
class Ca65Lexer(RegexLexer):
"""
For ca65 assembler sources.
@@ -927,13 +927,13 @@ class Ca65Lexer(RegexLexer):
def analyse_text(self, text):
# comments in GAS start with "#"
- if re.search(r'^\s*;', text, re.MULTILINE):
+ if re.search(r'^\s*;', text, re.MULTILINE):
return 0.9
class Dasm16Lexer(RegexLexer):
"""
- For DCPU-16 Assembly.
+ For DCPU-16 Assembly.
Check http://0x10c.com/doc/dcpu-16.txt
@@ -968,7 +968,7 @@ class Dasm16Lexer(RegexLexer):
]
# Regexes yo
- char = r'[a-zA-Z0-9_$@.]'
+ char = r'[a-zA-Z0-9_$@.]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
binary_number = r'0b[01_]+'
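
The LlvmMirLexer hunk above splits a MIR file on its YAML document markers: "--- |" documents are delegated to LlvmLexer, plain "---" documents are lexed as MIR, and "body: |" blocks are handed to LlvmMirBodyLexer (popping two states when the document ends). A minimal usage sketch, assuming Pygments 2.6 or newer per the versionadded note; the MIR snippet itself is made up for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # Illustrative MIR document: an embedded IR block ("--- |") followed by one
    # MIR function document; the exact instructions are invented.
    mir_source = """\
    --- |
      define i32 @f(i32 %x) {
        ret i32 %x
      }
    ...
    ---
    name:            f
    alignment:       16
    tracksRegLiveness: true
    body:             |
      bb.0:
        RET 0
    ...
    """

    lexer = get_lexer_by_name('llvm-mir')   # alias registered by LlvmMirLexer
    print(highlight(mir_source, lexer, TerminalFormatter()))
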
diff --git a/contrib/python/Pygments/py3/pygments/lexers/automation.py b/contrib/python/Pygments/py3/pygments/lexers/automation.py
index 7b03e39a06..92612bb3b6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/automation.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/automation.py
@@ -4,7 +4,7 @@
Lexers for automation scripting languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,7 +22,7 @@ class AutohotkeyLexer(RegexLexer):
.. versionadded:: 1.4
"""
name = 'autohotkey'
- aliases = ['autohotkey', 'ahk']
+ aliases = ['autohotkey', 'ahk']
filenames = ['*.ahk', '*.ahkl']
mimetypes = ['text/x-autohotkey']
@@ -326,7 +326,7 @@ class AutoItLexer(RegexLexer):
include('builtInFunctions'),
include('builtInMarcros'),
(r'"', String, combined('stringescape', 'dqs')),
- (r"'", String, 'sqs'),
+ (r"'", String, 'sqs'),
include('numbers'),
(r'[a-zA-Z_#@$][\w#@$]*', Name),
(r'\\|\'', Text),
@@ -368,11 +368,11 @@ class AutoItLexer(RegexLexer):
(r'"', String, '#pop'),
include('strings')
],
- 'sqs': [
- (r'\'\'|\`([,%`abfnrtv])', String.Escape),
- (r"'", String, '#pop'),
- (r"[^'\n]+", String)
- ],
+ 'sqs': [
+ (r'\'\'|\`([,%`abfnrtv])', String.Escape),
+ (r"'", String, '#pop'),
+ (r"[^'\n]+", String)
+ ],
'garbage': [
(r'[^\S\n]', Text),
],
diff --git a/contrib/python/Pygments/py3/pygments/lexers/bare.py b/contrib/python/Pygments/py3/pygments/lexers/bare.py
index bd5855f166..56ed586135 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/bare.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/bare.py
@@ -1,103 +1,103 @@
-"""
- pygments.lexers.bare
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the BARE schema.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words, bygroups
+"""
+ pygments.lexers.bare
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the BARE schema.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words, bygroups
from pygments.token import Text, Comment, Keyword, Name, Literal, Whitespace
-
-__all__ = ['BareLexer']
-
-
-class BareLexer(RegexLexer):
- """
- For `BARE schema <https://baremessages.org>`_ schema source.
-
- .. versionadded:: 2.7
- """
- name = 'BARE'
- filenames = ['*.bare']
- aliases = ['bare']
-
- flags = re.MULTILINE | re.UNICODE
-
- keywords = [
- 'type',
- 'enum',
- 'u8',
- 'u16',
- 'u32',
- 'u64',
- 'uint',
- 'i8',
- 'i16',
- 'i32',
- 'i64',
- 'int',
- 'f32',
- 'f64',
- 'bool',
- 'void',
- 'data',
- 'string',
- 'optional',
- 'map',
- ]
-
- tokens = {
- 'root': [
+
+__all__ = ['BareLexer']
+
+
+class BareLexer(RegexLexer):
+ """
+ For `BARE schema <https://baremessages.org>`_ schema source.
+
+ .. versionadded:: 2.7
+ """
+ name = 'BARE'
+ filenames = ['*.bare']
+ aliases = ['bare']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords = [
+ 'type',
+ 'enum',
+ 'u8',
+ 'u16',
+ 'u32',
+ 'u64',
+ 'uint',
+ 'i8',
+ 'i16',
+ 'i32',
+ 'i64',
+ 'int',
+ 'f32',
+ 'f64',
+ 'bool',
+ 'void',
+ 'data',
+ 'string',
+ 'optional',
+ 'map',
+ ]
+
+ tokens = {
+ 'root': [
(r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)(\{)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Text), 'struct'),
(r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)(\()',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Text), 'union'),
- (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)',
+ (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)',
bygroups(Keyword, Whitespace, Name, Whitespace), 'typedef'),
- (r'(enum)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)',
+ (r'(enum)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace), 'enum'),
- (r'#.*?$', Comment),
+ (r'#.*?$', Comment),
(r'\s+', Whitespace),
- ],
- 'struct': [
- (r'\{', Text, '#push'),
- (r'\}', Text, '#pop'),
+ ],
+ 'struct': [
+ (r'\{', Text, '#push'),
+ (r'\}', Text, '#pop'),
(r'([a-zA-Z0-9]+)(:)(\s*)', bygroups(Name.Attribute, Text, Whitespace), 'typedef'),
(r'\s+', Whitespace),
- ],
- 'union': [
- (r'\)', Text, '#pop'),
+ ],
+ 'union': [
+ (r'\)', Text, '#pop'),
(r'(\s*)(\|)(\s*)', bygroups(Whitespace, Text, Whitespace)),
- (r'[A-Z][a-zA-Z0-9]+', Name.Class),
- (words(keywords), Keyword),
+ (r'[A-Z][a-zA-Z0-9]+', Name.Class),
+ (words(keywords), Keyword),
(r'\s+', Whitespace),
- ],
- 'typedef': [
- (r'\[\]', Text),
- (r'#.*?$', Comment, '#pop'),
- (r'(\[)(\d+)(\])', bygroups(Text, Literal, Text)),
- (r'<|>', Text),
- (r'\(', Text, 'union'),
- (r'(\[)([a-z][a-z-A-Z0-9]+)(\])', bygroups(Text, Keyword, Text)),
- (r'(\[)([A-Z][a-z-A-Z0-9]+)(\])', bygroups(Text, Name.Class, Text)),
- (r'([A-Z][a-z-A-Z0-9]+)', Name.Class),
- (words(keywords), Keyword),
- (r'\n', Text, '#pop'),
- (r'\{', Text, 'struct'),
+ ],
+ 'typedef': [
+ (r'\[\]', Text),
+ (r'#.*?$', Comment, '#pop'),
+ (r'(\[)(\d+)(\])', bygroups(Text, Literal, Text)),
+ (r'<|>', Text),
+ (r'\(', Text, 'union'),
+ (r'(\[)([a-z][a-z-A-Z0-9]+)(\])', bygroups(Text, Keyword, Text)),
+ (r'(\[)([A-Z][a-z-A-Z0-9]+)(\])', bygroups(Text, Name.Class, Text)),
+ (r'([A-Z][a-z-A-Z0-9]+)', Name.Class),
+ (words(keywords), Keyword),
+ (r'\n', Text, '#pop'),
+ (r'\{', Text, 'struct'),
(r'\s+', Whitespace),
- (r'\d+', Literal),
- ],
- 'enum': [
- (r'\{', Text, '#push'),
- (r'\}', Text, '#pop'),
- (r'([A-Z][A-Z0-9_]*)(\s*=\s*)(\d+)', bygroups(Name.Attribute, Text, Literal)),
- (r'([A-Z][A-Z0-9_]*)', bygroups(Name.Attribute)),
- (r'#.*?$', Comment),
+ (r'\d+', Literal),
+ ],
+ 'enum': [
+ (r'\{', Text, '#push'),
+ (r'\}', Text, '#pop'),
+ (r'([A-Z][A-Z0-9_]*)(\s*=\s*)(\d+)', bygroups(Name.Attribute, Text, Literal)),
+ (r'([A-Z][A-Z0-9_]*)', bygroups(Name.Attribute)),
+ (r'#.*?$', Comment),
(r'\s+', Whitespace),
- ],
- }
+ ],
+ }
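
A quick sketch of the BareLexer above in use, assuming Pygments 2.7 or newer (per the versionadded note); the schema snippet is made up. Struct bodies are handled by the 'struct' state, field types by 'typedef':

    from pygments.lexers import get_lexer_by_name

    schema = """\
    type Customer {
      name: string
      orders: []Order
    }
    """

    for token_type, value in get_lexer_by_name('bare').get_tokens(schema):
        if value.strip():
            print(token_type, repr(value))
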
diff --git a/contrib/python/Pygments/py3/pygments/lexers/basic.py b/contrib/python/Pygments/py3/pygments/lexers/basic.py
index 3ccadf1e07..a0935fe48c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/basic.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/basic.py
@@ -4,7 +4,7 @@
Lexers for BASIC like languages (other than VB.net).
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -351,10 +351,10 @@ class CbmBasicV2Lexer(RegexLexer):
]
}
- def analyse_text(text):
+ def analyse_text(text):
# if it starts with a line number, it shouldn't be a "modern" Basic
# like VB.net
- if re.match(r'^\d+', text):
+ if re.match(r'^\d+', text):
return 0.2
@@ -522,18 +522,18 @@ class VBScriptLexer(RegexLexer):
(r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
(r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Float variant 2, for example: .1, .1e2
(r'[0-9]+e[+-]?[0-9]+', Number.Float), # Float variant 3, for example: 123e45
- (r'[0-9]+', Number.Integer),
+ (r'[0-9]+', Number.Integer),
('#.+#', String), # date or time value
(r'(dim)(\s+)([a-z_][a-z0-9_]*)',
bygroups(Keyword.Declaration, Whitespace, Name.Variable), 'dim_more'),
(r'(function|sub)(\s+)([a-z_][a-z0-9_]*)',
bygroups(Keyword.Declaration, Whitespace, Name.Function)),
- (r'(class)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(const)(\s+)([a-z_][a-z0-9_]*)',
- bygroups(Keyword.Declaration, Whitespace, Name.Constant)),
- (r'(end)(\s+)(class|function|if|property|sub|with)',
- bygroups(Keyword, Whitespace, Keyword)),
+ (r'(class)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Name.Class)),
+ (r'(const)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Name.Constant)),
+ (r'(end)(\s+)(class|function|if|property|sub|with)',
+ bygroups(Keyword, Whitespace, Keyword)),
(r'(on)(\s+)(error)(\s+)(goto)(\s+)(0)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Number.Integer)),
(r'(on)(\s+)(error)(\s+)(resume)(\s+)(next)',
@@ -554,8 +554,8 @@ class VBScriptLexer(RegexLexer):
(r'.+(\n)?', Error)
],
'dim_more': [
- (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)',
- bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)),
+ (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)',
+ bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)),
default('#pop'),
],
'string': [
@@ -611,7 +611,7 @@ class BBCBasicLexer(RegexLexer):
(r"[0-9]+", Name.Label),
(r"(\*)([^\n]*)",
bygroups(Keyword.Pseudo, Comment.Special)),
- default('code'),
+ default('code'),
],
'code': [
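
The three "Float variant" rules in the VBScript hunk above split floating-point literals by shape (digits-then-dot, leading dot, bare exponent). A standalone sanity check with plain re, outside Pygments; the variant names are mine:

    import re

    # Patterns copied from the hunk; the dict keys are descriptive labels only.
    patterns = {
        'dot float':     r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?',   # 1., 1.5, 1.5e3
        'leading-dot':   r'\.[0-9]+(e[+-]?[0-9]+)?',          # .1, .1e2
        'bare exponent': r'[0-9]+e[+-]?[0-9]+',               # 123e45
    }

    for sample in ('1.5e3', '.1e2', '123e45', '42'):
        hits = [name for name, pat in patterns.items()
                if re.fullmatch(pat, sample)]
        print(sample, '->', hits or ['integer/other'])
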
diff --git a/contrib/python/Pygments/py3/pygments/lexers/bibtex.py b/contrib/python/Pygments/py3/pygments/lexers/bibtex.py
index c169468901..adb771ebe9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/bibtex.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/bibtex.py
@@ -4,7 +4,7 @@
Lexers for BibTeX bibliography data and styles
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,13 +26,13 @@ class BibTeXLexer(ExtendedRegexLexer):
"""
name = 'BibTeX'
- aliases = ['bibtex', 'bib']
+ aliases = ['bibtex', 'bib']
filenames = ['*.bib']
mimetypes = ["text/x-bibtex"]
flags = re.IGNORECASE
ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
- IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
+ IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
def open_brace_callback(self, match, ctx):
opening_brace = match.group()
@@ -154,6 +154,6 @@ class BSTLexer(RegexLexer):
],
'whitespace': [
(r'\s+', Whitespace),
- ('%.*?$', Comment.Single),
+ ('%.*?$', Comment.Single),
],
}
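
The BibTeX IDENTIFIER above is assembled with str.format from ALLOWED_CHARS; printing the composed pattern makes the accepted key characters easier to see. Plain re, with the values copied verbatim from the hunk:

    import re

    ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
    IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)

    print(IDENTIFIER)
    # Entry keys such as "knuth:tex-1984" match in full; the lexer also sets
    # re.IGNORECASE, mirrored here.
    print(bool(re.fullmatch(IDENTIFIER, 'knuth:tex-1984', re.IGNORECASE)))
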
diff --git a/contrib/python/Pygments/py3/pygments/lexers/boa.py b/contrib/python/Pygments/py3/pygments/lexers/boa.py
index 8818eaded6..7352dee856 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/boa.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/boa.py
@@ -4,7 +4,7 @@
Lexers for the Boa language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -91,9 +91,9 @@ class BoaLexer(RegexLexer):
(classes, Name.Classes),
(words(operators), Operator),
(r'[][(),;{}\\.]', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"`(\\\\|\\[^\\]|[^`\\])*`", String.Backtick),
- (words(string_sep), String.Delimiter),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"`(\\\\|\\[^\\]|[^`\\])*`", String.Backtick),
+ (words(string_sep), String.Delimiter),
(r'[a-zA-Z_]+', Name.Variable),
(r'[0-9]+', Number.Integer),
(r'\s+', Whitespace), # Whitespace
diff --git a/contrib/python/Pygments/py3/pygments/lexers/business.py b/contrib/python/Pygments/py3/pygments/lexers/business.py
index 47713198ed..18a8eafe67 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/business.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/business.py
@@ -4,7 +4,7 @@
Lexers for "business-oriented" languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -477,8 +477,8 @@ class OpenEdgeLexer(RegexLexer):
(r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
(types, Keyword.Type),
(keywords, Name.Builtin),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\s+', Whitespace),
@@ -499,26 +499,26 @@ class OpenEdgeLexer(RegexLexer):
],
}
- def analyse_text(text):
- """Try to identify OpenEdge ABL based on a few common constructs."""
- result = 0
-
- if 'END.' in text:
- result += 0.05
-
- if 'END PROCEDURE.' in text:
- result += 0.05
-
- if 'ELSE DO:' in text:
- result += 0.05
-
- return result
-
-
+ def analyse_text(text):
+ """Try to identify OpenEdge ABL based on a few common constructs."""
+ result = 0
+
+ if 'END.' in text:
+ result += 0.05
+
+ if 'END PROCEDURE.' in text:
+ result += 0.05
+
+ if 'ELSE DO:' in text:
+ result += 0.05
+
+ return result
+
+
class GoodDataCLLexer(RegexLexer):
"""
Lexer for `GoodData-CL
- <https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
+ <https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
com/gooddata/processor/COMMANDS.txt>`_
script files.
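
The restored OpenEdge analyse_text above adds 0.05 for each common ABL construct it finds; guess_lexer() compares such scores across all registered lexers, so the overall winner for a real file may still differ. A rough illustration with a made-up snippet:

    from pygments.lexers.business import OpenEdgeLexer

    snippet = "DEFINE VARIABLE i AS INTEGER.\nELSE DO:\nEND PROCEDURE.\nEND.\n"
    # Prints roughly 0.15: 0.05 for each of 'END.', 'END PROCEDURE.', 'ELSE DO:'.
    print(OpenEdgeLexer.analyse_text(snippet))
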
diff --git a/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py b/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
index c9d1ed38ea..ee26907c4f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
@@ -4,7 +4,7 @@
Lexers for C/C++ languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,7 +14,7 @@ from pygments.lexer import RegexLexer, include, bygroups, using, \
this, inherit, default, words
from pygments.util import get_bool_opt
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
+ Number, Punctuation, Whitespace
__all__ = ['CLexer', 'CppLexer']
@@ -29,19 +29,19 @@ class CFamilyLexer(RegexLexer):
#: only one /* */ style comment
_ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
- # Hexadecimal part in an hexadecimal integer/floating-point literal.
- # This includes decimal separators matching.
- _hexpart = r'[0-9a-fA-F](\'?[0-9a-fA-F])*'
- # Decimal part in an decimal integer/floating-point literal.
- # This includes decimal separators matching.
- _decpart = r'\d(\'?\d)*'
- # Integer literal suffix (e.g. 'ull' or 'll').
- _intsuffix = r'(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?'
-
- # Identifier regex with C and C++ Universal Character Name (UCN) support.
+ # Hexadecimal part in an hexadecimal integer/floating-point literal.
+ # This includes decimal separators matching.
+ _hexpart = r'[0-9a-fA-F](\'?[0-9a-fA-F])*'
+ # Decimal part in an decimal integer/floating-point literal.
+ # This includes decimal separators matching.
+ _decpart = r'\d(\'?\d)*'
+ # Integer literal suffix (e.g. 'ull' or 'll').
+ _intsuffix = r'(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?'
+
+ # Identifier regex with C and C++ Universal Character Name (UCN) support.
_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+'
_namespaced_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+'
-
+
tokens = {
'whitespace': [
# preprocessor directives: without whitespace
@@ -54,8 +54,8 @@ class CFamilyLexer(RegexLexer):
bygroups(using(this), Comment.Preproc), 'macro'),
(r'(^[ \t]*)(?!(?:public|private|protected|default)\b)(case\b\s+)?(' + _ident + r')(\s*)(:)(?!:)',
bygroups(Whitespace, using(this), Name.Label, Whitespace, Punctuation)),
- (r'\n', Whitespace),
- (r'[^\S\n]+', Whitespace),
+ (r'\n', Whitespace),
+ (r'[^\S\n]+', Whitespace),
(r'\\\n', Text), # line continuation
(r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
@@ -63,66 +63,66 @@ class CFamilyLexer(RegexLexer):
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
],
'statements': [
- include('keywords'),
- include('types'),
- (r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'),
- (r"([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
+ include('keywords'),
+ include('types'),
+ (r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'),
+ (r"([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
bygroups(String.Affix, String.Char, String.Char, String.Char)),
-
- # Hexadecimal floating-point literals (C11, C++17)
- (r'0[xX](' + _hexpart + r'\.' + _hexpart + r'|\.' + _hexpart + r'|' + _hexpart + r')[pP][+-]?' + _hexpart + r'[lL]?', Number.Float),
-
- (r'(-)?(' + _decpart + r'\.' + _decpart + r'|\.' + _decpart + r'|' + _decpart + r')[eE][+-]?' + _decpart + r'[fFlL]?', Number.Float),
- (r'(-)?((' + _decpart + r'\.(' + _decpart + r')?|\.' + _decpart + r')[fFlL]?)|(' + _decpart + r'[fFlL])', Number.Float),
- (r'(-)?0[xX]' + _hexpart + _intsuffix, Number.Hex),
- (r'(-)?0[bB][01](\'?[01])*' + _intsuffix, Number.Bin),
- (r'(-)?0(\'?[0-7])+' + _intsuffix, Number.Oct),
- (r'(-)?' + _decpart + _intsuffix, Number.Integer),
+
+ # Hexadecimal floating-point literals (C11, C++17)
+ (r'0[xX](' + _hexpart + r'\.' + _hexpart + r'|\.' + _hexpart + r'|' + _hexpart + r')[pP][+-]?' + _hexpart + r'[lL]?', Number.Float),
+
+ (r'(-)?(' + _decpart + r'\.' + _decpart + r'|\.' + _decpart + r'|' + _decpart + r')[eE][+-]?' + _decpart + r'[fFlL]?', Number.Float),
+ (r'(-)?((' + _decpart + r'\.(' + _decpart + r')?|\.' + _decpart + r')[fFlL]?)|(' + _decpart + r'[fFlL])', Number.Float),
+ (r'(-)?0[xX]' + _hexpart + _intsuffix, Number.Hex),
+ (r'(-)?0[bB][01](\'?[01])*' + _intsuffix, Number.Bin),
+ (r'(-)?0(\'?[0-7])+' + _intsuffix, Number.Oct),
+ (r'(-)?' + _decpart + _intsuffix, Number.Integer),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.]', Punctuation),
- (r'(true|false|NULL)\b', Name.Builtin),
- (_ident, Name)
- ],
- 'types': [
- (words(('int8', 'int16', 'int32', 'int64', 'wchar_t'), prefix=r'__',
- suffix=r'\b'), Keyword.Reserved),
- (words(('bool', 'int', 'long', 'float', 'short', 'double', 'char',
- 'unsigned', 'signed', 'void'), suffix=r'\b'), Keyword.Type)
- ],
- 'keywords': [
+ (r'(true|false|NULL)\b', Name.Builtin),
+ (_ident, Name)
+ ],
+ 'types': [
+ (words(('int8', 'int16', 'int32', 'int64', 'wchar_t'), prefix=r'__',
+ suffix=r'\b'), Keyword.Reserved),
+ (words(('bool', 'int', 'long', 'float', 'short', 'double', 'char',
+ 'unsigned', 'signed', 'void'), suffix=r'\b'), Keyword.Type)
+ ],
+ 'keywords': [
(r'(struct|union)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
(words(('asm', 'auto', 'break', 'case', 'const', 'continue',
'default', 'do', 'else', 'enum', 'extern', 'for', 'goto',
- 'if', 'register', 'restricted', 'return', 'sizeof', 'struct',
- 'static', 'switch', 'typedef', 'volatile', 'while', 'union',
- 'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'),
+ 'if', 'register', 'restricted', 'return', 'sizeof', 'struct',
+ 'static', 'switch', 'typedef', 'volatile', 'while', 'union',
+ 'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'),
suffix=r'\b'), Keyword),
(words(('inline', '_inline', '__inline', 'naked', 'restrict',
- 'thread'), suffix=r'\b'), Keyword.Reserved),
+ 'thread'), suffix=r'\b'), Keyword.Reserved),
# Vector intrinsics
(r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
# Microsoft-isms
(words((
- 'asm', 'based', 'except', 'stdcall', 'cdecl',
- 'fastcall', 'declspec', 'finally', 'try',
- 'leave', 'w64', 'unaligned', 'raise', 'noop',
+ 'asm', 'based', 'except', 'stdcall', 'cdecl',
+ 'fastcall', 'declspec', 'finally', 'try',
+ 'leave', 'w64', 'unaligned', 'raise', 'noop',
'identifier', 'forceinline', 'assume'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved)
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved)
],
'root': [
include('whitespace'),
- include('keywords'),
+ include('keywords'),
# functions
- (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
- r'(' + _namespaced_ident + r')' # method name
+ (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
+ r'(' + _namespaced_ident + r')' # method name
r'(\s*\([^;]*?\))' # signature
r'([^;{]*)(\{)',
bygroups(using(this), Name.Function, using(this), using(this),
Punctuation),
'function'),
# function declarations
- (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
- r'(' + _namespaced_ident + r')' # method name
+ (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
+ r'(' + _namespaced_ident + r')' # method name
r'(\s*\([^;]*?\))' # signature
r'([^;]*)(;)',
bygroups(using(this), Name.Function, using(this), using(this),
@@ -133,8 +133,8 @@ class CFamilyLexer(RegexLexer):
'statement': [
include('whitespace'),
include('statements'),
- (r'\}', Punctuation),
- (r'[{;]', Punctuation, '#pop'),
+ (r'\}', Punctuation),
+ (r'[{;]', Punctuation, '#pop'),
],
'function': [
include('whitespace'),
@@ -152,10 +152,10 @@ class CFamilyLexer(RegexLexer):
(r'\\', String), # stray backslash
],
'macro': [
- (r'('+_ws1+r')(include)('+_ws1+r')("[^"]+")([^\n]*)',
- bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
- (r'('+_ws1+r')(include)('+_ws1+r')(<[^>]+>)([^\n]*)',
- bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
+ (r'('+_ws1+r')(include)('+_ws1+r')("[^"]+")([^\n]*)',
+ bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
+ (r'('+_ws1+r')(include)('+_ws1+r')(<[^>]+>)([^\n]*)',
+ bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
(r'[^/\n]+', Comment.Preproc),
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'//.*?\n', Comment.Single, '#pop'),
@@ -168,46 +168,46 @@ class CFamilyLexer(RegexLexer):
(r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
(r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
(r'.*?\n', Comment),
- ],
- 'classname': [
- (_ident, Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- default('#pop')
+ ],
+ 'classname': [
+ (_ident, Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ default('#pop')
]
}
- stdlib_types = {
+ stdlib_types = {
'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
- 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
- c99_types = {
- 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
+ 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
+ c99_types = {
+ 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
- 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
- linux_types = {
+ 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
+ linux_types = {
'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
- 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
- c11_atomic_types = {
- 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
- 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
- 'atomic_llong', 'atomic_ullong', 'atomic_char16_t', 'atomic_char32_t', 'atomic_wchar_t',
- 'atomic_int_least8_t', 'atomic_uint_least8_t', 'atomic_int_least16_t',
- 'atomic_uint_least16_t', 'atomic_int_least32_t', 'atomic_uint_least32_t',
- 'atomic_int_least64_t', 'atomic_uint_least64_t', 'atomic_int_fast8_t',
- 'atomic_uint_fast8_t', 'atomic_int_fast16_t', 'atomic_uint_fast16_t',
- 'atomic_int_fast32_t', 'atomic_uint_fast32_t', 'atomic_int_fast64_t',
- 'atomic_uint_fast64_t', 'atomic_intptr_t', 'atomic_uintptr_t', 'atomic_size_t',
- 'atomic_ptrdiff_t', 'atomic_intmax_t', 'atomic_uintmax_t'}
+ 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
+ c11_atomic_types = {
+ 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
+ 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
+ 'atomic_llong', 'atomic_ullong', 'atomic_char16_t', 'atomic_char32_t', 'atomic_wchar_t',
+ 'atomic_int_least8_t', 'atomic_uint_least8_t', 'atomic_int_least16_t',
+ 'atomic_uint_least16_t', 'atomic_int_least32_t', 'atomic_uint_least32_t',
+ 'atomic_int_least64_t', 'atomic_uint_least64_t', 'atomic_int_fast8_t',
+ 'atomic_uint_fast8_t', 'atomic_int_fast16_t', 'atomic_uint_fast16_t',
+ 'atomic_int_fast32_t', 'atomic_uint_fast32_t', 'atomic_int_fast64_t',
+ 'atomic_uint_fast64_t', 'atomic_intptr_t', 'atomic_uintptr_t', 'atomic_size_t',
+ 'atomic_ptrdiff_t', 'atomic_intmax_t', 'atomic_uintmax_t'}
def __init__(self, **options):
self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
- self.c11highlighting = get_bool_opt(options, 'c11highlighting', True)
+ self.c11highlighting = get_bool_opt(options, 'c11highlighting', True)
self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
RegexLexer.__init__(self, **options)
@@ -219,8 +219,8 @@ class CFamilyLexer(RegexLexer):
token = Keyword.Type
elif self.c99highlighting and value in self.c99_types:
token = Keyword.Type
- elif self.c11highlighting and value in self.c11_atomic_types:
- token = Keyword.Type
+ elif self.c11highlighting and value in self.c11_atomic_types:
+ token = Keyword.Type
elif self.platformhighlighting and value in self.linux_types:
token = Keyword.Type
yield index, token, value
@@ -229,25 +229,25 @@ class CFamilyLexer(RegexLexer):
class CLexer(CFamilyLexer):
"""
For C source code with preprocessor directives.
-
- Additional options accepted:
-
- `stdlibhighlighting`
- Highlight common types found in the C/C++ standard library (e.g. `size_t`).
- (default: ``True``).
-
- `c99highlighting`
- Highlight common types found in the C99 standard library (e.g. `int8_t`).
- Actually, this includes all fixed-width integer types.
- (default: ``True``).
-
- `c11highlighting`
- Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
- (default: ``True``).
-
- `platformhighlighting`
- Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
- (default: ``True``).
+
+ Additional options accepted:
+
+ `stdlibhighlighting`
+ Highlight common types found in the C/C++ standard library (e.g. `size_t`).
+ (default: ``True``).
+
+ `c99highlighting`
+ Highlight common types found in the C99 standard library (e.g. `int8_t`).
+ Actually, this includes all fixed-width integer types.
+ (default: ``True``).
+
+ `c11highlighting`
+ Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
+ (default: ``True``).
+
+ `platformhighlighting`
+ Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
+ (default: ``True``).
"""
name = 'C'
aliases = ['c']
@@ -255,20 +255,20 @@ class CLexer(CFamilyLexer):
mimetypes = ['text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap']
priority = 0.1
- tokens = {
- 'keywords': [
- (words((
- '_Alignas', '_Alignof', '_Noreturn', '_Generic', '_Thread_local',
- '_Static_assert', '_Imaginary', 'noreturn', 'imaginary', 'complex'),
- suffix=r'\b'), Keyword),
- inherit
- ],
- 'types': [
- (words(('_Bool', '_Complex', '_Atomic'), suffix=r'\b'), Keyword.Type),
- inherit
- ]
- }
-
+ tokens = {
+ 'keywords': [
+ (words((
+ '_Alignas', '_Alignof', '_Noreturn', '_Generic', '_Thread_local',
+ '_Static_assert', '_Imaginary', 'noreturn', 'imaginary', 'complex'),
+ suffix=r'\b'), Keyword),
+ inherit
+ ],
+ 'types': [
+ (words(('_Bool', '_Complex', '_Atomic'), suffix=r'\b'), Keyword.Type),
+ inherit
+ ]
+ }
+
def analyse_text(text):
if re.search(r'^\s*#include [<"]', text, re.MULTILINE):
return 0.1
@@ -279,25 +279,25 @@ class CLexer(CFamilyLexer):
class CppLexer(CFamilyLexer):
"""
For C++ source code with preprocessor directives.
-
- Additional options accepted:
-
- `stdlibhighlighting`
- Highlight common types found in the C/C++ standard library (e.g. `size_t`).
- (default: ``True``).
-
- `c99highlighting`
- Highlight common types found in the C99 standard library (e.g. `int8_t`).
- Actually, this includes all fixed-width integer types.
- (default: ``True``).
-
- `c11highlighting`
- Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
- (default: ``True``).
-
- `platformhighlighting`
- Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
- (default: ``True``).
+
+ Additional options accepted:
+
+ `stdlibhighlighting`
+ Highlight common types found in the C/C++ standard library (e.g. `size_t`).
+ (default: ``True``).
+
+ `c99highlighting`
+ Highlight common types found in the C99 standard library (e.g. `int8_t`).
+ Actually, this includes all fixed-width integer types.
+ (default: ``True``).
+
+ `c11highlighting`
+ Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`).
+ (default: ``True``).
+
+ `platformhighlighting`
+ Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
+ (default: ``True``).
"""
name = 'C++'
aliases = ['cpp', 'c++']
@@ -310,7 +310,7 @@ class CppLexer(CFamilyLexer):
tokens = {
'statements': [
# C++11 raw strings
- (r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
+ (r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
String, String.Delimiter, String)),
inherit,
@@ -324,41 +324,41 @@ class CppLexer(CFamilyLexer):
# Offload C++ extensions, http://offload.codeplay.com/
(r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
],
- 'enumname': [
- include('whitespace'),
- # 'enum class' and 'enum struct' C++11 support
- (words(('class', 'struct'), suffix=r'\b'), Keyword),
- (CFamilyLexer._ident, Name.Class, '#pop'),
+ 'enumname': [
+ include('whitespace'),
+ # 'enum class' and 'enum struct' C++11 support
+ (words(('class', 'struct'), suffix=r'\b'), Keyword),
+ (CFamilyLexer._ident, Name.Class, '#pop'),
# template specification
(r'\s*(?=>)', Text, '#pop'),
- default('#pop')
- ],
- 'keywords': [
+ default('#pop')
+ ],
+ 'keywords': [
(r'(class|concept|typename)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (words((
- 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
- 'export', 'friend', 'mutable', 'new', 'operator',
- 'private', 'protected', 'public', 'reinterpret_cast', 'class',
- 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
- 'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept',
- 'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval',
- 'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module',
- 'typename'),
- suffix=r'\b'), Keyword),
- (r'namespace\b', Keyword, 'namespace'),
+ (words((
+ 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
+ 'export', 'friend', 'mutable', 'new', 'operator',
+ 'private', 'protected', 'public', 'reinterpret_cast', 'class',
+ 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
+ 'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept',
+ 'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval',
+ 'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module',
+ 'typename'),
+ suffix=r'\b'), Keyword),
+ (r'namespace\b', Keyword, 'namespace'),
(r'(enum)(\s+)', bygroups(Keyword, Whitespace), 'enumname'),
- inherit
- ],
- 'types': [
- (r'char(16_t|32_t|8_t)\b', Keyword.Type),
- inherit
- ],
- 'namespace': [
- (r'[;{]', Punctuation, ('#pop', 'root')),
- (r'inline\b', Keyword.Reserved),
- (CFamilyLexer._ident, Name.Namespace),
- include('statement')
- ]
+ inherit
+ ],
+ 'types': [
+ (r'char(16_t|32_t|8_t)\b', Keyword.Type),
+ inherit
+ ],
+ 'namespace': [
+ (r'[;{]', Punctuation, ('#pop', 'root')),
+ (r'inline\b', Keyword.Reserved),
+ (CFamilyLexer._ident, Name.Namespace),
+ include('statement')
+ ]
}
def analyse_text(text):
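
The option docstrings restored above (stdlibhighlighting, c99highlighting, c11highlighting, platformhighlighting) are per-instance switches consumed in CFamilyLexer.__init__. A hedged example of toggling them on CLexer:

    from pygments.lexers.c_cpp import CLexer

    code = "int8_t counter; atomic_int hits;"

    rich  = CLexer()                                          # all four options default to True
    plain = CLexer(c99highlighting=False, c11highlighting=False)

    def kinds(lexer):
        # keep only the two identifiers of interest
        return [(val, tok) for tok, val in lexer.get_tokens(code)
                if val in ('int8_t', 'atomic_int')]

    print(kinds(rich))    # both reported as Token.Keyword.Type
    print(kinds(plain))   # with those options off they stay Token.Name
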
diff --git a/contrib/python/Pygments/py3/pygments/lexers/c_like.py b/contrib/python/Pygments/py3/pygments/lexers/c_like.py
index ed3864832d..688d316dfc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/c_like.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/c_like.py
@@ -4,7 +4,7 @@
Lexers for other C-like languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,14 +13,14 @@ import re
from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
+ Number, Punctuation, Whitespace
from pygments.lexers.c_cpp import CLexer, CppLexer
from pygments.lexers import _mql_builtins
__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
- 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer',
- 'OmgIdlLexer']
+ 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer',
+ 'OmgIdlLexer']
class PikeLexer(CppLexer):
@@ -172,7 +172,7 @@ class ECLexer(CLexer):
(r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
(r'(null|value|this)\b', Name.Builtin),
inherit,
- ]
+ ]
}
@@ -286,23 +286,23 @@ class CudaLexer(CLexer):
aliases = ['cuda', 'cu']
mimetypes = ['text/x-cuda']
- function_qualifiers = {'__device__', '__global__', '__host__',
- '__noinline__', '__forceinline__'}
- variable_qualifiers = {'__device__', '__constant__', '__shared__',
- '__restrict__'}
- vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
- 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
- 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
- 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
- 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
- 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
- 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
- 'double1', 'double2', 'dim3'}
- variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'}
- functions = {'__threadfence_block', '__threadfence', '__threadfence_system',
- '__syncthreads', '__syncthreads_count', '__syncthreads_and',
- '__syncthreads_or'}
- execution_confs = {'<<<', '>>>'}
+ function_qualifiers = {'__device__', '__global__', '__host__',
+ '__noinline__', '__forceinline__'}
+ variable_qualifiers = {'__device__', '__constant__', '__shared__',
+ '__restrict__'}
+ vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
+ 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
+ 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
+ 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
+ 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
+ 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
+ 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
+ 'double1', 'double2', 'dim3'}
+ variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'}
+ functions = {'__threadfence_block', '__threadfence', '__threadfence_system',
+ '__syncthreads', '__syncthreads_count', '__syncthreads_and',
+ '__syncthreads_or'}
+ execution_confs = {'<<<', '>>>'}
def get_tokens_unprocessed(self, text):
for index, token, value in CLexer.get_tokens_unprocessed(self, text):
@@ -335,11 +335,11 @@ class SwigLexer(CppLexer):
priority = 0.04 # Lower than C/C++ and Objective C/C++
tokens = {
- 'root': [
- # Match it here so it won't be matched as a function in the rest of root
- (r'\$\**\&?\w+', Name),
- inherit
- ],
+ 'root': [
+ # Match it here so it won't be matched as a function in the rest of root
+ (r'\$\**\&?\w+', Name),
+ inherit
+ ],
'statements': [
# SWIG directives
(r'(%[a-z_][a-z0-9_]*)', Name.Function),
@@ -352,7 +352,7 @@ class SwigLexer(CppLexer):
}
# This is a far from complete set of SWIG directives
- swig_directives = {
+ swig_directives = {
# Most common directives
'%apply', '%define', '%director', '%enddef', '%exception', '%extend',
'%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
@@ -371,7 +371,7 @@ class SwigLexer(CppLexer):
'%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
'%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
'%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
- '%warnfilter'}
+ '%warnfilter'}
def analyse_text(text):
rv = 0
@@ -429,13 +429,13 @@ class ArduinoLexer(CppLexer):
mimetypes = ['text/x-arduino']
# Language sketch main structure functions
- structure = {'setup', 'loop'}
+ structure = {'setup', 'loop'}
# Language operators
- operators = {'not', 'or', 'and', 'xor'}
+ operators = {'not', 'or', 'and', 'xor'}
# Language 'variables'
- variables = {
+ variables = {
'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET',
'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH',
@@ -452,10 +452,10 @@ class ArduinoLexer(CppLexer):
'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary',
'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
- 'atomic_llong', 'atomic_ullong', 'PROGMEM'}
+ 'atomic_llong', 'atomic_ullong', 'PROGMEM'}
# Language shipped functions and class ( )
- functions = {
+ functions = {
'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient',
@@ -517,13 +517,13 @@ class ArduinoLexer(CppLexer):
'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put',
'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
- 'isHexadecimalDigit'}
+ 'isHexadecimalDigit'}
# do not highlight
- suppress_highlight = {
+ suppress_highlight = {
'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
- 'static_assert', 'thread_local', 'restrict'}
+ 'static_assert', 'thread_local', 'restrict'}
def get_tokens_unprocessed(self, text):
for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
@@ -555,7 +555,7 @@ class CharmciLexer(CppLexer):
mimetypes = []
tokens = {
- 'keywords': [
+ 'keywords': [
(r'(module)(\s+)', bygroups(Keyword, Text), 'classname'),
(words(('mainmodule', 'mainchare', 'chare', 'array', 'group',
'nodegroup', 'message', 'conditional')), Keyword),
@@ -569,96 +569,96 @@ class CharmciLexer(CppLexer):
inherit,
],
}
-
-
-class OmgIdlLexer(CLexer):
- """
- Lexer for `Object Management Group Interface Definition Language <https://www.omg.org/spec/IDL/About-IDL/>`_.
-
- .. versionadded:: 2.9
- """
-
- name = 'OMG Interface Definition Language'
- aliases = ['omg-idl']
- filenames = ['*.idl', '*.pidl']
- mimetypes = []
-
- scoped_name = r'((::)?\w+)+'
-
- tokens = {
- 'values': [
- (words(('true', 'false'), prefix=r'(?i)', suffix=r'\b'), Number),
- (r'([Ll]?)(")', bygroups(String.Affix, String.Double), 'string'),
- (r'([Ll]?)(\')(\\[^\']+)(\')',
- bygroups(String.Affix, String.Char, String.Escape, String.Char)),
- (r'([Ll]?)(\')(\\\')(\')',
- bygroups(String.Affix, String.Char, String.Escape, String.Char)),
- (r'([Ll]?)(\'.\')', bygroups(String.Affix, String.Char)),
- (r'[+-]?\d+(\.\d*)?[Ee][+-]?\d+', Number.Float),
- (r'[+-]?(\d+\.\d*)|(\d*\.\d+)([Ee][+-]?\d+)?', Number.Float),
- (r'(?i)[+-]?0x[0-9a-f]+', Number.Hex),
- (r'[+-]?[1-9]\d*', Number.Integer),
- (r'[+-]?0[0-7]*', Number.Oct),
- (r'[\+\-\*\/%^&\|~]', Operator),
- (words(('<<', '>>')), Operator),
- (scoped_name, Name),
- (r'[{};:,<>\[\]]', Punctuation),
- ],
- 'annotation_params': [
- include('whitespace'),
- (r'\(', Punctuation, '#push'),
- include('values'),
- (r'=', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'annotation_params_maybe': [
- (r'\(', Punctuation, 'annotation_params'),
- include('whitespace'),
- default('#pop'),
- ],
- 'annotation_appl': [
- (r'@' + scoped_name, Name.Decorator, 'annotation_params_maybe'),
- ],
- 'enum': [
- include('whitespace'),
- (r'[{,]', Punctuation),
- (r'\w+', Name.Constant),
- include('annotation_appl'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- (words((
- 'typedef', 'const',
- 'in', 'out', 'inout', 'local',
- ), prefix=r'(?i)', suffix=r'\b'), Keyword.Declaration),
- (words((
- 'void', 'any', 'native', 'bitfield',
- 'unsigned', 'boolean', 'char', 'wchar', 'octet', 'short', 'long',
- 'int8', 'uint8', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'fixed',
- 'sequence', 'string', 'wstring', 'map',
- ), prefix=r'(?i)', suffix=r'\b'), Keyword.Type),
- (words((
- '@annotation', 'struct', 'union', 'bitset', 'interface',
- 'exception', 'valuetype', 'eventtype', 'component',
- ), prefix=r'(?i)', suffix=r'(\s+)(\w+)'), bygroups(Keyword, Whitespace, Name.Class)),
- (words((
- 'abstract', 'alias', 'attribute', 'case', 'connector',
- 'consumes', 'context', 'custom', 'default', 'emits', 'factory',
- 'finder', 'getraises', 'home', 'import', 'manages', 'mirrorport',
- 'multiple', 'Object', 'oneway', 'primarykey', 'private', 'port',
- 'porttype', 'provides', 'public', 'publishes', 'raises',
- 'readonly', 'setraises', 'supports', 'switch', 'truncatable',
- 'typeid', 'typename', 'typeprefix', 'uses', 'ValueBase',
- ), prefix=r'(?i)', suffix=r'\b'), Keyword),
- (r'(?i)(enum|bitmask)(\s+)(\w+)',
- bygroups(Keyword, Whitespace, Name.Class), 'enum'),
- (r'(?i)(module)(\s+)(\w+)',
- bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r'(\w+)(\s*)(=)', bygroups(Name.Constant, Whitespace, Operator)),
- (r'[\(\)]', Punctuation),
- include('values'),
- include('annotation_appl'),
- ],
- }
+
+
+class OmgIdlLexer(CLexer):
+ """
+ Lexer for `Object Management Group Interface Definition Language <https://www.omg.org/spec/IDL/About-IDL/>`_.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'OMG Interface Definition Language'
+ aliases = ['omg-idl']
+ filenames = ['*.idl', '*.pidl']
+ mimetypes = []
+
+ scoped_name = r'((::)?\w+)+'
+
+ tokens = {
+ 'values': [
+ (words(('true', 'false'), prefix=r'(?i)', suffix=r'\b'), Number),
+ (r'([Ll]?)(")', bygroups(String.Affix, String.Double), 'string'),
+ (r'([Ll]?)(\')(\\[^\']+)(\')',
+ bygroups(String.Affix, String.Char, String.Escape, String.Char)),
+ (r'([Ll]?)(\')(\\\')(\')',
+ bygroups(String.Affix, String.Char, String.Escape, String.Char)),
+ (r'([Ll]?)(\'.\')', bygroups(String.Affix, String.Char)),
+ (r'[+-]?\d+(\.\d*)?[Ee][+-]?\d+', Number.Float),
+ (r'[+-]?(\d+\.\d*)|(\d*\.\d+)([Ee][+-]?\d+)?', Number.Float),
+ (r'(?i)[+-]?0x[0-9a-f]+', Number.Hex),
+ (r'[+-]?[1-9]\d*', Number.Integer),
+ (r'[+-]?0[0-7]*', Number.Oct),
+ (r'[\+\-\*\/%^&\|~]', Operator),
+ (words(('<<', '>>')), Operator),
+ (scoped_name, Name),
+ (r'[{};:,<>\[\]]', Punctuation),
+ ],
+ 'annotation_params': [
+ include('whitespace'),
+ (r'\(', Punctuation, '#push'),
+ include('values'),
+ (r'=', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'annotation_params_maybe': [
+ (r'\(', Punctuation, 'annotation_params'),
+ include('whitespace'),
+ default('#pop'),
+ ],
+ 'annotation_appl': [
+ (r'@' + scoped_name, Name.Decorator, 'annotation_params_maybe'),
+ ],
+ 'enum': [
+ include('whitespace'),
+ (r'[{,]', Punctuation),
+ (r'\w+', Name.Constant),
+ include('annotation_appl'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (words((
+ 'typedef', 'const',
+ 'in', 'out', 'inout', 'local',
+ ), prefix=r'(?i)', suffix=r'\b'), Keyword.Declaration),
+ (words((
+ 'void', 'any', 'native', 'bitfield',
+ 'unsigned', 'boolean', 'char', 'wchar', 'octet', 'short', 'long',
+ 'int8', 'uint8', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64',
+ 'float', 'double', 'fixed',
+ 'sequence', 'string', 'wstring', 'map',
+ ), prefix=r'(?i)', suffix=r'\b'), Keyword.Type),
+ (words((
+ '@annotation', 'struct', 'union', 'bitset', 'interface',
+ 'exception', 'valuetype', 'eventtype', 'component',
+ ), prefix=r'(?i)', suffix=r'(\s+)(\w+)'), bygroups(Keyword, Whitespace, Name.Class)),
+ (words((
+ 'abstract', 'alias', 'attribute', 'case', 'connector',
+ 'consumes', 'context', 'custom', 'default', 'emits', 'factory',
+ 'finder', 'getraises', 'home', 'import', 'manages', 'mirrorport',
+ 'multiple', 'Object', 'oneway', 'primarykey', 'private', 'port',
+ 'porttype', 'provides', 'public', 'publishes', 'raises',
+ 'readonly', 'setraises', 'supports', 'switch', 'truncatable',
+ 'typeid', 'typename', 'typeprefix', 'uses', 'ValueBase',
+ ), prefix=r'(?i)', suffix=r'\b'), Keyword),
+ (r'(?i)(enum|bitmask)(\s+)(\w+)',
+ bygroups(Keyword, Whitespace, Name.Class), 'enum'),
+ (r'(?i)(module)(\s+)(\w+)',
+ bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
+ (r'(\w+)(\s*)(=)', bygroups(Name.Constant, Whitespace, Operator)),
+ (r'[\(\)]', Punctuation),
+ include('values'),
+ include('annotation_appl'),
+ ],
+ }
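
CudaLexer and ArduinoLexer in the hunk above both override get_tokens_unprocessed to promote selected identifiers after the parent C/C++ lexer has run. A sketch of that pattern with a hypothetical dialect; the class name and its builtin set are not part of Pygments:

    from pygments.lexers.c_cpp import CLexer
    from pygments.token import Keyword, Name

    class FakeShaderLexer(CLexer):
        """Hypothetical dialect lexer; only the remapping pattern mirrors the
        CUDA/Arduino lexers above."""
        name = 'Fake shader dialect'
        aliases = []                      # deliberately unregistered
        builtins = {'vec3', 'dot3', 'clamp01'}

        def get_tokens_unprocessed(self, text):
            for index, token, value in CLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.builtins:
                    token = Keyword.Type  # promote known builtins
                yield index, token, value

    for _, tok, val in FakeShaderLexer().get_tokens_unprocessed('vec3 v;'):
        print(tok, repr(val))
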
diff --git a/contrib/python/Pygments/py3/pygments/lexers/capnproto.py b/contrib/python/Pygments/py3/pygments/lexers/capnproto.py
index 0363eca470..d6c7270ce6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/capnproto.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/capnproto.py
@@ -4,7 +4,7 @@
Lexers for the Cap'n Proto schema language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/cddl.py b/contrib/python/Pygments/py3/pygments/lexers/cddl.py
index 5c949802e3..8229e7e01b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/cddl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/cddl.py
@@ -1,192 +1,192 @@
-"""
- pygments.lexers.cddl
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Concise data definition language (CDDL), a notational
- convention to express CBOR and JSON data structures.
-
- More information:
- https://datatracker.ietf.org/doc/rfc8610/
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-__all__ = ['CddlLexer']
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import (
- Comment,
- Error,
- Keyword,
- Name,
- Number,
- Operator,
- Punctuation,
- String,
- Text,
+"""
+ pygments.lexers.cddl
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Concise data definition language (CDDL), a notational
+ convention to express CBOR and JSON data structures.
+
+ More information:
+ https://datatracker.ietf.org/doc/rfc8610/
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+__all__ = ['CddlLexer']
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import (
+ Comment,
+ Error,
+ Keyword,
+ Name,
+ Number,
+ Operator,
+ Punctuation,
+ String,
+ Text,
Whitespace,
-)
-
-
-class CddlLexer(RegexLexer):
- """
- Lexer for CDDL definitions.
-
- .. versionadded:: 2.8
- """
- name = "CDDL"
- aliases = ["cddl"]
- filenames = ["*.cddl"]
- mimetypes = ["text/x-cddl"]
-
- _prelude_types = [
- "any",
- "b64legacy",
- "b64url",
- "bigfloat",
- "bigint",
- "bignint",
- "biguint",
- "bool",
- "bstr",
- "bytes",
- "cbor-any",
- "decfrac",
- "eb16",
- "eb64legacy",
- "eb64url",
- "encoded-cbor",
- "false",
- "float",
- "float16",
- "float16-32",
- "float32",
- "float32-64",
- "float64",
- "int",
- "integer",
- "mime-message",
- "nil",
- "nint",
- "null",
- "number",
- "regexp",
- "tdate",
- "text",
- "time",
- "true",
- "tstr",
- "uint",
- "undefined",
- "unsigned",
- "uri",
- ]
-
- _controls = [
- ".and",
- ".bits",
- ".cbor",
- ".cborseq",
- ".default",
- ".eq",
- ".ge",
- ".gt",
- ".le",
- ".lt",
- ".ne",
- ".regexp",
- ".size",
- ".within",
- ]
-
- _re_id = (
- r"[$@A-Z_a-z]"
+)
+
+
+class CddlLexer(RegexLexer):
+ """
+ Lexer for CDDL definitions.
+
+ .. versionadded:: 2.8
+ """
+ name = "CDDL"
+ aliases = ["cddl"]
+ filenames = ["*.cddl"]
+ mimetypes = ["text/x-cddl"]
+
+ _prelude_types = [
+ "any",
+ "b64legacy",
+ "b64url",
+ "bigfloat",
+ "bigint",
+ "bignint",
+ "biguint",
+ "bool",
+ "bstr",
+ "bytes",
+ "cbor-any",
+ "decfrac",
+ "eb16",
+ "eb64legacy",
+ "eb64url",
+ "encoded-cbor",
+ "false",
+ "float",
+ "float16",
+ "float16-32",
+ "float32",
+ "float32-64",
+ "float64",
+ "int",
+ "integer",
+ "mime-message",
+ "nil",
+ "nint",
+ "null",
+ "number",
+ "regexp",
+ "tdate",
+ "text",
+ "time",
+ "true",
+ "tstr",
+ "uint",
+ "undefined",
+ "unsigned",
+ "uri",
+ ]
+
+ _controls = [
+ ".and",
+ ".bits",
+ ".cbor",
+ ".cborseq",
+ ".default",
+ ".eq",
+ ".ge",
+ ".gt",
+ ".le",
+ ".lt",
+ ".ne",
+ ".regexp",
+ ".size",
+ ".within",
+ ]
+
+ _re_id = (
+ r"[$@A-Z_a-z]"
r"(?:[\-\.]+(?=[$@0-9A-Z_a-z])|[$@0-9A-Z_a-z])*"
- )
-
- # While the spec reads more like "an int must not start with 0" we use a
- # lookahead here that says "after a 0 there must be no digit". This makes the
- # '0' the invalid character in '01', which looks nicer when highlighted.
- _re_uint = r"(?:0b[01]+|0x[0-9a-fA-F]+|[1-9]\d*|0(?!\d))"
- _re_int = r"-?" + _re_uint
-
- flags = re.UNICODE | re.MULTILINE
-
- tokens = {
+ )
+
+ # While the spec reads more like "an int must not start with 0" we use a
+ # lookahead here that says "after a 0 there must be no digit". This makes the
+ # '0' the invalid character in '01', which looks nicer when highlighted.
+ _re_uint = r"(?:0b[01]+|0x[0-9a-fA-F]+|[1-9]\d*|0(?!\d))"
+ _re_int = r"-?" + _re_uint
+
+ flags = re.UNICODE | re.MULTILINE
+
+ tokens = {
"commentsandwhitespace": [(r"\s+", Whitespace), (r";.+$", Comment.Single)],
- "root": [
- include("commentsandwhitespace"),
- # tag types
- (r"#(\d\.{uint})?".format(uint=_re_uint), Keyword.Type), # type or any
- # occurence
- (
- r"({uint})?(\*)({uint})?".format(uint=_re_uint),
- bygroups(Number, Operator, Number),
- ),
- (r"\?|\+", Operator), # occurrence
- (r"\^", Operator), # cuts
- (r"(\.\.\.|\.\.)", Operator), # rangeop
- (words(_controls, suffix=r"\b"), Operator.Word), # ctlops
- # into choice op
- (r"&(?=\s*({groupname}|\())".format(groupname=_re_id), Operator),
- (r"~(?=\s*{})".format(_re_id), Operator), # unwrap op
- (r"//|/(?!/)", Operator), # double und single slash
- (r"=>|/==|/=|=", Operator),
- (r"[\[\]{}\(\),<>:]", Punctuation),
- # Bytestrings
- (r"(b64)(')", bygroups(String.Affix, String.Single), "bstrb64url"),
- (r"(h)(')", bygroups(String.Affix, String.Single), "bstrh"),
- (r"'", String.Single, "bstr"),
- # Barewords as member keys (must be matched before values, types, typenames,
- # groupnames).
- # Token type is String as barewords are always interpreted as such.
- (
- r"({bareword})(\s*)(:)".format(bareword=_re_id),
+ "root": [
+ include("commentsandwhitespace"),
+ # tag types
+ (r"#(\d\.{uint})?".format(uint=_re_uint), Keyword.Type), # type or any
+ # occurence
+ (
+ r"({uint})?(\*)({uint})?".format(uint=_re_uint),
+ bygroups(Number, Operator, Number),
+ ),
+ (r"\?|\+", Operator), # occurrence
+ (r"\^", Operator), # cuts
+ (r"(\.\.\.|\.\.)", Operator), # rangeop
+ (words(_controls, suffix=r"\b"), Operator.Word), # ctlops
+ # into choice op
+ (r"&(?=\s*({groupname}|\())".format(groupname=_re_id), Operator),
+ (r"~(?=\s*{})".format(_re_id), Operator), # unwrap op
+ (r"//|/(?!/)", Operator), # double und single slash
+ (r"=>|/==|/=|=", Operator),
+ (r"[\[\]{}\(\),<>:]", Punctuation),
+ # Bytestrings
+ (r"(b64)(')", bygroups(String.Affix, String.Single), "bstrb64url"),
+ (r"(h)(')", bygroups(String.Affix, String.Single), "bstrh"),
+ (r"'", String.Single, "bstr"),
+ # Barewords as member keys (must be matched before values, types, typenames,
+ # groupnames).
+ # Token type is String as barewords are always interpreted as such.
+ (
+ r"({bareword})(\s*)(:)".format(bareword=_re_id),
bygroups(String, Whitespace, Punctuation),
- ),
- # predefined types
- (
- words(_prelude_types, prefix=r"(?![\-_$@])\b", suffix=r"\b(?![\-_$@])"),
- Name.Builtin,
- ),
- # user-defined groupnames, typenames
- (_re_id, Name.Class),
- # values
- (r"0b[01]+", Number.Bin),
- (r"0o[0-7]+", Number.Oct),
- (r"0x[0-9a-fA-F]+(\.[0-9a-fA-F]+)?p[+-]?\d+", Number.Hex), # hexfloat
- (r"0x[0-9a-fA-F]+", Number.Hex), # hex
- # Float
- (
- r"{int}(?=(\.\d|e[+-]?\d))(?:\.\d+)?(?:e[+-]?\d+)?".format(int=_re_int),
- Number.Float,
- ),
- # Int
- (_re_int, Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- ],
- "bstrb64url": [
- (r"'", String.Single, "#pop"),
- include("commentsandwhitespace"),
- (r"\\.", String.Escape),
- (r"[0-9a-zA-Z\-_=]+", String.Single),
- (r".", Error),
- # (r";.+$", Token.Other),
- ],
- "bstrh": [
- (r"'", String.Single, "#pop"),
- include("commentsandwhitespace"),
- (r"\\.", String.Escape),
- (r"[0-9a-fA-F]+", String.Single),
- (r".", Error),
- ],
- "bstr": [
- (r"'", String.Single, "#pop"),
- (r"\\.", String.Escape),
+ ),
+ # predefined types
+ (
+ words(_prelude_types, prefix=r"(?![\-_$@])\b", suffix=r"\b(?![\-_$@])"),
+ Name.Builtin,
+ ),
+ # user-defined groupnames, typenames
+ (_re_id, Name.Class),
+ # values
+ (r"0b[01]+", Number.Bin),
+ (r"0o[0-7]+", Number.Oct),
+ (r"0x[0-9a-fA-F]+(\.[0-9a-fA-F]+)?p[+-]?\d+", Number.Hex), # hexfloat
+ (r"0x[0-9a-fA-F]+", Number.Hex), # hex
+ # Float
+ (
+ r"{int}(?=(\.\d|e[+-]?\d))(?:\.\d+)?(?:e[+-]?\d+)?".format(int=_re_int),
+ Number.Float,
+ ),
+ # Int
+ (_re_int, Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ ],
+ "bstrb64url": [
+ (r"'", String.Single, "#pop"),
+ include("commentsandwhitespace"),
+ (r"\\.", String.Escape),
+ (r"[0-9a-zA-Z\-_=]+", String.Single),
+ (r".", Error),
+ # (r";.+$", Token.Other),
+ ],
+ "bstrh": [
+ (r"'", String.Single, "#pop"),
+ include("commentsandwhitespace"),
+ (r"\\.", String.Escape),
+ (r"[0-9a-fA-F]+", String.Single),
+ (r".", Error),
+ ],
+ "bstr": [
+ (r"'", String.Single, "#pop"),
+ (r"\\.", String.Escape),
(r"[^'\\]+", String.Single),
- ],
- }
+ ],
+ }
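
The comment above explains why _re_uint puts a negative lookahead after a lone 0: in "01" the leading zero gets no match (and so is emitted as the error character by the lexer) while the trailing "1" still lexes as an integer. A standalone check with plain re, pattern copied from the hunk:

    import re

    re_uint = r"(?:0b[01]+|0x[0-9a-fA-F]+|[1-9]\d*|0(?!\d))"

    for sample in ('0', '42', '0x2A', '0b101', '01'):
        ok = re.fullmatch(re_uint, sample)
        print(sample, '->', 'uint' if ok else 'no full match')
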
diff --git a/contrib/python/Pygments/py3/pygments/lexers/chapel.py b/contrib/python/Pygments/py3/pygments/lexers/chapel.py
index ad25981d8c..3ad3bc4044 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/chapel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/chapel.py
@@ -4,7 +4,7 @@
Lexer for the Chapel language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,7 +17,7 @@ __all__ = ['ChapelLexer']
class ChapelLexer(RegexLexer):
"""
- For `Chapel <https://chapel-lang.org/>`_ source.
+ For `Chapel <https://chapel-lang.org/>`_ source.
.. versionadded:: 2.0
"""
@@ -26,38 +26,38 @@ class ChapelLexer(RegexLexer):
aliases = ['chapel', 'chpl']
# mimetypes = ['text/x-chapel']
- known_types = ('bool', 'bytes', 'complex', 'imag', 'int', 'locale',
- 'nothing', 'opaque', 'range', 'real', 'string', 'uint',
- 'void')
-
- type_modifiers_par = ('atomic', 'single', 'sync')
- type_modifiers_mem = ('borrowed', 'owned', 'shared', 'unmanaged')
- type_modifiers = (*type_modifiers_par, *type_modifiers_mem)
-
- declarations = ('config', 'const', 'in', 'inout', 'out', 'param', 'ref',
- 'type', 'var')
-
- constants = ('false', 'nil', 'none', 'true')
-
- other_keywords = ('align', 'as',
- 'begin', 'break', 'by',
- 'catch', 'cobegin', 'coforall', 'continue',
- 'defer', 'delete', 'dmapped', 'do', 'domain',
- 'else', 'enum', 'except', 'export', 'extern',
- 'for', 'forall', 'foreach', 'forwarding',
- 'if', 'implements', 'import', 'index', 'init', 'inline',
- 'label', 'lambda', 'let', 'lifetime', 'local',
- 'new', 'noinit',
- 'on', 'only', 'otherwise', 'override',
- 'pragma', 'primitive', 'private', 'prototype', 'public',
- 'reduce', 'require', 'return',
- 'scan', 'select', 'serial', 'sparse', 'subdomain',
- 'then', 'this', 'throw', 'throws', 'try',
- 'use',
- 'when', 'where', 'while', 'with',
- 'yield',
- 'zip')
-
+ known_types = ('bool', 'bytes', 'complex', 'imag', 'int', 'locale',
+ 'nothing', 'opaque', 'range', 'real', 'string', 'uint',
+ 'void')
+
+ type_modifiers_par = ('atomic', 'single', 'sync')
+ type_modifiers_mem = ('borrowed', 'owned', 'shared', 'unmanaged')
+ type_modifiers = (*type_modifiers_par, *type_modifiers_mem)
+
+ declarations = ('config', 'const', 'in', 'inout', 'out', 'param', 'ref',
+ 'type', 'var')
+
+ constants = ('false', 'nil', 'none', 'true')
+
+ other_keywords = ('align', 'as',
+ 'begin', 'break', 'by',
+ 'catch', 'cobegin', 'coforall', 'continue',
+ 'defer', 'delete', 'dmapped', 'do', 'domain',
+ 'else', 'enum', 'except', 'export', 'extern',
+ 'for', 'forall', 'foreach', 'forwarding',
+ 'if', 'implements', 'import', 'index', 'init', 'inline',
+ 'label', 'lambda', 'let', 'lifetime', 'local',
+ 'new', 'noinit',
+ 'on', 'only', 'otherwise', 'override',
+ 'pragma', 'primitive', 'private', 'prototype', 'public',
+ 'reduce', 'require', 'return',
+ 'scan', 'select', 'serial', 'sparse', 'subdomain',
+ 'then', 'this', 'throw', 'throws', 'try',
+ 'use',
+ 'when', 'where', 'while', 'with',
+ 'yield',
+ 'zip')
+
tokens = {
'root': [
(r'\n', Whitespace),
@@ -67,11 +67,11 @@ class ChapelLexer(RegexLexer):
(r'//(.*?)\n', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (words(declarations, suffix=r'\b'), Keyword.Declaration),
- (words(constants, suffix=r'\b'), Keyword.Constant),
- (words(known_types, suffix=r'\b'), Keyword.Type),
- (words((*type_modifiers, *other_keywords), suffix=r'\b'), Keyword),
-
+ (words(declarations, suffix=r'\b'), Keyword.Declaration),
+ (words(constants, suffix=r'\b'), Keyword.Constant),
+ (words(known_types, suffix=r'\b'), Keyword.Type),
+ (words((*type_modifiers, *other_keywords), suffix=r'\b'), Keyword),
+
(r'(iter)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
(r'(proc)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
(r'(operator)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
@@ -100,8 +100,8 @@ class ChapelLexer(RegexLexer):
(r'[0-9]+', Number.Integer),
# strings
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'(\\\\|\\'|[^'])*'", String),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
# tokens
(r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
@@ -118,18 +118,18 @@ class ChapelLexer(RegexLexer):
(r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
],
'procname': [
- (r'([a-zA-Z_][.\w$]*|' # regular function name, including secondary
- r'\~[a-zA-Z_][.\w$]*|' # support for legacy destructors
- r'[+*/!~%<>=&^|\-:]{1,2})', # operators
+ (r'([a-zA-Z_][.\w$]*|' # regular function name, including secondary
+ r'\~[a-zA-Z_][.\w$]*|' # support for legacy destructors
+ r'[+*/!~%<>=&^|\-:]{1,2})', # operators
Name.Function, '#pop'),
-
- # allow `proc (atomic T).foo`
- (r'\(', Punctuation, "receivertype"),
- (r'\)+\.', Punctuation),
- ],
- 'receivertype': [
- (words(type_modifiers, suffix=r'\b'), Keyword),
- (words(known_types, suffix=r'\b'), Keyword.Type),
- (r'[^()]*', Name.Other, '#pop'),
+
+ # allow `proc (atomic T).foo`
+ (r'\(', Punctuation, "receivertype"),
+ (r'\)+\.', Punctuation),
],
+ 'receivertype': [
+ (words(type_modifiers, suffix=r'\b'), Keyword),
+ (words(known_types, suffix=r'\b'), Keyword.Type),
+ (r'[^()]*', Name.Other, '#pop'),
+ ],
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/clean.py b/contrib/python/Pygments/py3/pygments/lexers/clean.py
index 579cf7c30d..a758730c0d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/clean.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/clean.py
@@ -4,11 +4,11 @@
Lexer for the Clean language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from pygments.lexer import ExtendedRegexLexer, words, default, include, bygroups
+from pygments.lexer import ExtendedRegexLexer, words, default, include, bygroups
from pygments.token import Comment, Error, Keyword, Literal, Name, Number, \
Operator, Punctuation, String, Whitespace
@@ -34,9 +34,9 @@ class CleanLexer(ExtendedRegexLexer):
modulewords = ('implementation', 'definition', 'system')
- lowerId = r'[a-z`][\w`]*'
- upperId = r'[A-Z`][\w`]*'
- funnyId = r'[~@#$%\^?!+\-*<>\\/|&=:]+'
+ lowerId = r'[a-z`][\w`]*'
+ upperId = r'[A-Z`][\w`]*'
+ funnyId = r'[~@#$%\^?!+\-*<>\\/|&=:]+'
scoreUpperId = r'_' + upperId
scoreLowerId = r'_' + lowerId
moduleId = r'[a-zA-Z_][a-zA-Z0-9_.`]+'
@@ -59,15 +59,15 @@ class CleanLexer(ExtendedRegexLexer):
],
'comments': [
(r'//.*\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comments.in'),
+ (r'/\*', Comment.Multiline, 'comments.in'),
(r'/\*\*', Comment.Special, 'comments.in'),
],
'comments.in': [
- (r'\*\/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'\*(?!/)', Comment.Multiline),
- (r'/', Comment.Multiline),
+ (r'\*\/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'\*(?!/)', Comment.Multiline),
+ (r'/', Comment.Multiline),
],
'keywords': [
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
@@ -91,8 +91,8 @@ class CleanLexer(ExtendedRegexLexer):
(r'(\s*)\b(as)\b', bygroups(Whitespace, Keyword), ('#pop', 'import.module.as')),
(moduleId, Name.Class),
(r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- (r'\s+', Whitespace),
- default('#pop'),
+ (r'\s+', Whitespace),
+ default('#pop'),
],
'import.module.as': [
include('whitespace'),
@@ -160,7 +160,7 @@ class CleanLexer(ExtendedRegexLexer):
(r'[$\n]', Error, '#pop'),
],
'operators': [
- (r'[-~@#$%\^?!+*<>\\/|&=:.]+', Operator),
+ (r'[-~@#$%\^?!+*<>\\/|&=:.]+', Operator),
(r'\b_+\b', Operator),
],
'delimiters': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/compiled.py b/contrib/python/Pygments/py3/pygments/lexers/compiled.py
index 13aa39ce2d..31642834f6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/compiled.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/compiled.py
@@ -4,7 +4,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/configs.py b/contrib/python/Pygments/py3/pygments/lexers/configs.py
index 99fab14860..acb3f5ffb0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/configs.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/configs.py
@@ -4,7 +4,7 @@
Lexers for configuration file formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,8 +21,8 @@ __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
- 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
- 'NestedTextLexer', 'SingularityLexer']
+ 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
+ 'NestedTextLexer', 'SingularityLexer']
class IniLexer(RegexLexer):
@@ -32,13 +32,13 @@ class IniLexer(RegexLexer):
name = 'INI'
aliases = ['ini', 'cfg', 'dosini']
- filenames = [
+ filenames = [
'*.ini', '*.cfg', '*.inf', '.editorconfig',
- # systemd unit files
- # https://www.freedesktop.org/software/systemd/man/systemd.unit.html
- '*.service', '*.socket', '*.device', '*.mount', '*.automount',
- '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
- ]
+ # systemd unit files
+ # https://www.freedesktop.org/software/systemd/man/systemd.unit.html
+ '*.service', '*.socket', '*.device', '*.mount', '*.automount',
+ '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
+ ]
mimetypes = ['text/x-ini', 'text/inf']
tokens = {
@@ -46,7 +46,7 @@ class IniLexer(RegexLexer):
(r'\s+', Whitespace),
(r'[;#].*', Comment.Single),
(r'\[.*?\]$', Keyword),
- (r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
+ (r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
# standalone option, supported by some INI parsers
(r'(.+?)$', Name.Attribute),
@@ -162,7 +162,7 @@ class KconfigLexer(RegexLexer):
name = 'Kconfig'
aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
# Adjust this if new kconfig file names appear in your environment
- filenames = ['Kconfig*', '*Config.in*', 'external.in*',
+ filenames = ['Kconfig*', '*Config.in*', 'external.in*',
'standard-modules.in']
mimetypes = ['text/x-kconfig']
# No re.MULTILINE, indentation-aware help text needs line-by-line handling
@@ -307,12 +307,12 @@ class ApacheConfLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Whitespace),
- (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
- (r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)',
+ (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
+ (r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)',
bygroups(Name.Tag, Whitespace, String, Name.Tag)),
- (r'(</[^\s>]+)(>)',
- bygroups(Name.Tag, Name.Tag)),
- (r'[a-z]\w*', Name.Builtin, 'value'),
+ (r'(</[^\s>]+)(>)',
+ bygroups(Name.Tag, Name.Tag)),
+ (r'[a-z]\w*', Name.Builtin, 'value'),
(r'\.+', Text),
],
'value': [
@@ -322,12 +322,12 @@ class ApacheConfLexer(RegexLexer):
(r'[^\S\n]+', Whitespace),
(r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
(r'\d+', Number),
- (r'/([*a-z0-9][*\w./-]+)', String.Other),
+ (r'/([*a-z0-9][*\w./-]+)', String.Other),
(r'(on|off|none|any|all|double|email|dns|min|minimal|'
r'os|productonly|full|emerg|alert|crit|error|warn|'
r'notice|info|debug|registry|script|inetd|standalone|'
r'user|group)\b', Keyword),
- (r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
+ (r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
(r'[^\s"\\]+', Text)
],
}
@@ -356,7 +356,7 @@ class SquidConfLexer(RegexLexer):
"cache_effective_user", "cache_host", "cache_host_acl",
"cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
"cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
- "cache_peer_access", "cache_replacement_policy", "cache_stoplist",
+ "cache_peer_access", "cache_replacement_policy", "cache_stoplist",
"cache_stoplist_pattern", "cache_store_log", "cache_swap",
"cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
"client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
@@ -516,8 +516,8 @@ class LighttpdConfLexer(RegexLexer):
.. versionadded:: 0.11
"""
name = 'Lighttpd configuration file'
- aliases = ['lighttpd', 'lighty']
- filenames = ['lighttpd.conf']
+ aliases = ['lighttpd', 'lighty']
+ filenames = ['lighttpd.conf']
mimetypes = ['text/x-lighttpd-conf']
tokens = {
@@ -548,15 +548,15 @@ class DockerLexer(RegexLexer):
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
- _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
+ _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
_bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
- _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
+ _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'#.*', Comment),
- (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
+ (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
bygroups(Keyword, Whitespace, String, Whitespace, Keyword, Whitespace, String)),
(r'(ONBUILD)(\s+)(%s)' % (_lb,), bygroups(Keyword, Whitespace, using(BashLexer))),
(r'(HEALTHCHECK)(\s+)((%s--\w+=\w+%s)*)' % (_lb, _lb),
@@ -584,54 +584,54 @@ class TerraformLexer(ExtendedRegexLexer):
filenames = ['*.tf']
mimetypes = ['application/x-tf', 'application/x-terraform']
- classes = ('backend', 'data', 'module', 'output', 'provider',
- 'provisioner', 'resource', 'variable')
- classes_re = "({})".format(('|').join(classes))
+ classes = ('backend', 'data', 'module', 'output', 'provider',
+ 'provisioner', 'resource', 'variable')
+ classes_re = "({})".format(('|').join(classes))
types = ('string', 'number', 'bool', 'list', 'tuple', 'map', 'set', 'object', 'null')
-
- numeric_functions = ('abs', 'ceil', 'floor', 'log', 'max',
- 'mix', 'parseint', 'pow', 'signum')
-
- string_functions = ('chomp', 'format', 'formatlist', 'indent',
- 'join', 'lower', 'regex', 'regexall', 'replace',
- 'split', 'strrev', 'substr', 'title', 'trim',
- 'trimprefix', 'trimsuffix', 'trimspace', 'upper'
- )
-
- collection_functions = ('alltrue', 'anytrue', 'chunklist', 'coalesce',
- 'coalescelist', 'compact', 'concat', 'contains',
- 'distinct', 'element', 'flatten', 'index', 'keys',
- 'length', 'list', 'lookup', 'map', 'matchkeys',
- 'merge', 'range', 'reverse', 'setintersection',
- 'setproduct', 'setsubtract', 'setunion', 'slice',
- 'sort', 'sum', 'transpose', 'values', 'zipmap'
- )
-
- encoding_functions = ('base64decode', 'base64encode', 'base64gzip',
- 'csvdecode', 'jsondecode', 'jsonencode', 'textdecodebase64',
- 'textencodebase64', 'urlencode', 'yamldecode', 'yamlencode')
-
-
- filesystem_functions = ('abspath', 'dirname', 'pathexpand', 'basename',
- 'file', 'fileexists', 'fileset', 'filebase64', 'templatefile')
-
- date_time_functions = ('formatdate', 'timeadd', 'timestamp')
-
- hash_crypto_functions = ('base64sha256', 'base64sha512', 'bcrypt', 'filebase64sha256',
- 'filebase64sha512', 'filemd5', 'filesha1', 'filesha256', 'filesha512',
- 'md5', 'rsadecrypt', 'sha1', 'sha256', 'sha512', 'uuid', 'uuidv5')
-
- ip_network_functions = ('cidrhost', 'cidrnetmask', 'cidrsubnet', 'cidrsubnets')
-
- type_conversion_functions = ('can', 'defaults', 'tobool', 'tolist', 'tomap',
- 'tonumber', 'toset', 'tostring', 'try')
-
- builtins = numeric_functions + string_functions + collection_functions + encoding_functions +\
- filesystem_functions + date_time_functions + hash_crypto_functions + ip_network_functions +\
- type_conversion_functions
- builtins_re = "({})".format(('|').join(builtins))
-
+
+ numeric_functions = ('abs', 'ceil', 'floor', 'log', 'max',
+ 'mix', 'parseint', 'pow', 'signum')
+
+ string_functions = ('chomp', 'format', 'formatlist', 'indent',
+ 'join', 'lower', 'regex', 'regexall', 'replace',
+ 'split', 'strrev', 'substr', 'title', 'trim',
+ 'trimprefix', 'trimsuffix', 'trimspace', 'upper'
+ )
+
+ collection_functions = ('alltrue', 'anytrue', 'chunklist', 'coalesce',
+ 'coalescelist', 'compact', 'concat', 'contains',
+ 'distinct', 'element', 'flatten', 'index', 'keys',
+ 'length', 'list', 'lookup', 'map', 'matchkeys',
+ 'merge', 'range', 'reverse', 'setintersection',
+ 'setproduct', 'setsubtract', 'setunion', 'slice',
+ 'sort', 'sum', 'transpose', 'values', 'zipmap'
+ )
+
+ encoding_functions = ('base64decode', 'base64encode', 'base64gzip',
+ 'csvdecode', 'jsondecode', 'jsonencode', 'textdecodebase64',
+ 'textencodebase64', 'urlencode', 'yamldecode', 'yamlencode')
+
+
+ filesystem_functions = ('abspath', 'dirname', 'pathexpand', 'basename',
+ 'file', 'fileexists', 'fileset', 'filebase64', 'templatefile')
+
+ date_time_functions = ('formatdate', 'timeadd', 'timestamp')
+
+ hash_crypto_functions = ('base64sha256', 'base64sha512', 'bcrypt', 'filebase64sha256',
+ 'filebase64sha512', 'filemd5', 'filesha1', 'filesha256', 'filesha512',
+ 'md5', 'rsadecrypt', 'sha1', 'sha256', 'sha512', 'uuid', 'uuidv5')
+
+ ip_network_functions = ('cidrhost', 'cidrnetmask', 'cidrsubnet', 'cidrsubnets')
+
+ type_conversion_functions = ('can', 'defaults', 'tobool', 'tolist', 'tomap',
+ 'tonumber', 'toset', 'tostring', 'try')
+
+ builtins = numeric_functions + string_functions + collection_functions + encoding_functions +\
+ filesystem_functions + date_time_functions + hash_crypto_functions + ip_network_functions +\
+ type_conversion_functions
+ builtins_re = "({})".format(('|').join(builtins))
+
def heredoc_callback(self, match, ctx):
# Parse a terraform heredoc
# match: 1 = <<[-]?, 2 = name 3 = rest of line
@@ -672,49 +672,49 @@ class TerraformLexer(ExtendedRegexLexer):
tokens = {
'root': [
- include('basic'),
- include('whitespace'),
-
- # Strings
- (r'(".*")', bygroups(String.Double)),
-
- # Constants
- (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Name.Constant),
-
- # Types
- (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- include('identifier'),
- include('punctuation'),
- (r'[0-9]+', Number),
+ include('basic'),
+ include('whitespace'),
+
+ # Strings
+ (r'(".*")', bygroups(String.Double)),
+
+ # Constants
+ (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Name.Constant),
+
+ # Types
+ (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ include('identifier'),
+ include('punctuation'),
+ (r'[0-9]+', Number),
],
'basic': [
- (r'\s*/\*', Comment.Multiline, 'comment'),
- (r'\s*#.*\n', Comment.Single),
- include('whitespace'),
-
- # e.g. terraform {
- # e.g. egress {
- (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=?)(\s*)(\{)',
+ (r'\s*/\*', Comment.Multiline, 'comment'),
+ (r'\s*#.*\n', Comment.Single),
+ include('whitespace'),
+
+ # e.g. terraform {
+ # e.g. egress {
+ (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=?)(\s*)(\{)',
bygroups(Whitespace, Name.Builtin, Whitespace, Operator, Whitespace, Punctuation)),
-
- # Assignment with attributes, e.g. something = ...
- (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=)(\s*)',
+
+ # Assignment with attributes, e.g. something = ...
+ (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=)(\s*)',
bygroups(Whitespace, Name.Attribute, Whitespace, Operator, Whitespace)),
-
- # Assignment with environment variables and similar, e.g. "something" = ...
- # or key value assignment, e.g. "SlotName" : ...
- (r'(\s*)("\S+")(\s*)([=:])(\s*)',
+
+ # Assignment with environment variables and similar, e.g. "something" = ...
+ # or key value assignment, e.g. "SlotName" : ...
+ (r'(\s*)("\S+")(\s*)([=:])(\s*)',
bygroups(Whitespace, Literal.String.Double, Whitespace, Operator, Whitespace)),
-
- # Functions, e.g. jsonencode(element("value"))
- (builtins_re + r'(\()', bygroups(Name.Function, Punctuation)),
-
- # List of attributes, e.g. ignore_changes = [last_modified, filename]
- (r'(\[)([a-z_,\s]+)(\])', bygroups(Punctuation, Name.Builtin, Punctuation)),
-
- # e.g. resource "aws_security_group" "allow_tls" {
- # e.g. backend "consul" {
+
+ # Functions, e.g. jsonencode(element("value"))
+ (builtins_re + r'(\()', bygroups(Name.Function, Punctuation)),
+
+ # List of attributes, e.g. ignore_changes = [last_modified, filename]
+ (r'(\[)([a-z_,\s]+)(\])', bygroups(Punctuation, Name.Builtin, Punctuation)),
+
+ # e.g. resource "aws_security_group" "allow_tls" {
+ # e.g. backend "consul" {
(classes_re + r'(\s+)', bygroups(Keyword.Reserved, Whitespace), 'blockname'),
# here-doc style delimited strings
@@ -723,18 +723,18 @@ class TerraformLexer(ExtendedRegexLexer):
heredoc_callback,
)
],
- 'blockname': [
- # e.g. resource "aws_security_group" "allow_tls" {
- # e.g. backend "consul" {
- (r'(\s*)("[0-9a-zA-Z-_]+")?(\s*)("[0-9a-zA-Z-_]+")(\s+)(\{)',
+ 'blockname': [
+ # e.g. resource "aws_security_group" "allow_tls" {
+ # e.g. backend "consul" {
+ (r'(\s*)("[0-9a-zA-Z-_]+")?(\s*)("[0-9a-zA-Z-_]+")(\s+)(\{)',
bygroups(Whitespace, Name.Class, Whitespace, Name.Variable, Whitespace, Punctuation)),
],
- 'identifier': [
- (r'\b(var\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
- (r'\b([0-9a-zA-Z-_\[\]]+\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
+ 'identifier': [
+ (r'\b(var\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
+ (r'\b([0-9a-zA-Z-_\[\]]+\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
],
'punctuation': [
- (r'[\[\]()\{\},.?:!=]', Punctuation),
+ (r'[\[\]()\{\},.?:!=]', Punctuation),
],
'comment': [
(r'[^*/]', Comment.Multiline),
@@ -1014,11 +1014,11 @@ class TOMLLexer(RegexLexer):
name = 'TOML'
aliases = ['toml']
- filenames = ['*.toml', 'Pipfile', 'poetry.lock']
+ filenames = ['*.toml', 'Pipfile', 'poetry.lock']
tokens = {
'root': [
- # Table
+ # Table
(r'^(\s*)(\[.*?\])$', bygroups(Whitespace, Keyword)),
# Basics, comments, strings
@@ -1026,7 +1026,7 @@ class TOMLLexer(RegexLexer):
(r'\n', Whitespace),
(r'#.*?$', Comment.Single),
# Basic string
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Literal string
(r'\'\'\'(.*)\'\'\'', String),
(r'\'[^\']*\'', String),
@@ -1054,73 +1054,73 @@ class TOMLLexer(RegexLexer):
]
}
-
-class NestedTextLexer(RegexLexer):
- """
- Lexer for `NestedText <https://nestedtext.org>`_, a human-friendly data

- format.
-
- .. versionadded:: 2.9
- """
-
- name = 'NestedText'
- aliases = ['nestedtext', 'nt']
- filenames = ['*.nt']
-
- _quoted_dict_item = r'^(\s*)({0})(.*?)({0}: ?)(.*?)(\s*)$'
-
- tokens = {
- 'root': [
+
+class NestedTextLexer(RegexLexer):
+ """
+ Lexer for `NestedText <https://nestedtext.org>`_, a human-friendly data
+ format.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'NestedText'
+ aliases = ['nestedtext', 'nt']
+ filenames = ['*.nt']
+
+ _quoted_dict_item = r'^(\s*)({0})(.*?)({0}: ?)(.*?)(\s*)$'
+
+ tokens = {
+ 'root': [
(r'^(\s*)(#.*?)$', bygroups(Whitespace, Comment)),
(r'^(\s*)(>)( ?)(.*?)(\s*)$', bygroups(Whitespace, Punctuation, Whitespace, String, Whitespace)),
(r'^(\s*)(-)( ?)(.*?)(\s*)$', bygroups(Whitespace, Punctuation, Whitespace, String, Whitespace)),
(_quoted_dict_item.format("'"), bygroups(Whitespace, Punctuation, Name, Punctuation, String, Whitespace)),
(_quoted_dict_item.format('"'), bygroups(Whitespace, Punctuation, Name, Punctuation, String, Whitespace)),
(r'^(\s*)(.*?)(:)( ?)(.*?)(\s*)$', bygroups(Whitespace, Name, Punctuation, Whitespace, String, Whitespace)),
- ],
- }
-
-
-class SingularityLexer(RegexLexer):
- """
- Lexer for `Singularity definition files
- <https://www.sylabs.io/guides/3.0/user-guide/definition_files.html>`_.
-
- .. versionadded:: 2.6
- """
-
- name = 'Singularity'
- aliases = ['singularity']
- filenames = ['*.def', 'Singularity']
- flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
-
- _headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
+ ],
+ }
+
+
+class SingularityLexer(RegexLexer):
+ """
+ Lexer for `Singularity definition files
+ <https://www.sylabs.io/guides/3.0/user-guide/definition_files.html>`_.
+
+ .. versionadded:: 2.6
+ """
+
+ name = 'Singularity'
+ aliases = ['singularity']
+ filenames = ['*.def', 'Singularity']
+ flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
+
+ _headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
_section = r'^(%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript))(\s*)'
_appsect = r'^(%app(?:install|help|run|labels|env|test|files))(\s*)'
-
- tokens = {
- 'root': [
+
+ tokens = {
+ 'root': [
(_section, bygroups(Generic.Heading, Whitespace), 'script'),
(_appsect, bygroups(Generic.Heading, Whitespace), 'script'),
(_headers, bygroups(Whitespace, Keyword, Text)),
- (r'\s*#.*?\n', Comment),
- (r'\b(([0-9]+\.?[0-9]*)|(\.[0-9]+))\b', Number),
+ (r'\s*#.*?\n', Comment),
+ (r'\b(([0-9]+\.?[0-9]*)|(\.[0-9]+))\b', Number),
(r'[ \t]+', Whitespace),
- (r'(?!^\s*%).', Text),
- ],
- 'script': [
- (r'(.+?(?=^\s*%))|(.*)', using(BashLexer), '#pop'),
- ],
- }
-
- def analyse_text(text):
- """This is a quite simple script file, but there are a few keywords
- which seem unique to this language."""
- result = 0
- if re.search(r'\b(?:osversion|includecmd|mirrorurl)\b', text, re.IGNORECASE):
- result += 0.5
-
- if re.search(SingularityLexer._section[1:], text):
- result += 0.49
-
- return result
+ (r'(?!^\s*%).', Text),
+ ],
+ 'script': [
+ (r'(.+?(?=^\s*%))|(.*)', using(BashLexer), '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ """This is a quite simple script file, but there are a few keywords
+ which seem unique to this language."""
+ result = 0
+ if re.search(r'\b(?:osversion|includecmd|mirrorurl)\b', text, re.IGNORECASE):
+ result += 0.5
+
+ if re.search(SingularityLexer._section[1:], text):
+ result += 0.49
+
+ return result
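The analyse_text heuristic at the end of the Singularity lexer above adds 0.5 when one of the distinctive header keywords (osversion, includecmd, mirrorurl) appears and 0.49 when a %-section header is found, so a typical bootstrap definition scores 0.99. A small sanity check against the logic shown above (analyse_text is defined without self, so it can be called on the class; the exact score assumes the code as diffed here):

from pygments.lexers.configs import SingularityLexer

definition = """\
Bootstrap: yum
OSVersion: 7
MirrorURL: http://mirror.centos.org/centos-7/7/os/x86_64/

%post
    yum -y install vim
"""

# 0.5 for the OSVersion header keyword + 0.49 for the %post section -> 0.99
print(SingularityLexer.analyse_text(definition))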
diff --git a/contrib/python/Pygments/py3/pygments/lexers/console.py b/contrib/python/Pygments/py3/pygments/lexers/console.py
index 5d7fea78f5..e99c31c7b7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/console.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/console.py
@@ -4,7 +4,7 @@
Lexers for misc console output.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/crystal.py b/contrib/python/Pygments/py3/pygments/lexers/crystal.py
index d06ab0c060..99ce7329c8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/crystal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/crystal.py
@@ -4,7 +4,7 @@
Lexer for Crystal.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -56,11 +56,11 @@ class CrystalLexer(ExtendedRegexLexer):
ctx.pos = match.start(5)
ctx.end = match.end(5)
- # this may find other heredocs, so limit the recursion depth
- if len(heredocstack) < 100:
- yield from self.get_tokens_unprocessed(context=ctx)
- else:
- yield ctx.pos, String.Heredoc, match.group(5)
+ # this may find other heredocs, so limit the recursion depth
+ if len(heredocstack) < 100:
+ yield from self.get_tokens_unprocessed(context=ctx)
+ else:
+ yield ctx.pos, String.Heredoc, match.group(5)
ctx.pos = match.end()
if outermost:
@@ -90,9 +90,9 @@ class CrystalLexer(ExtendedRegexLexer):
def gen_crystalstrings_rules():
states = {}
states['strings'] = [
- (r'\:\w+[!?]?', String.Symbol),
- (words(CRYSTAL_OPERATORS, prefix=r'\:'), String.Symbol),
- (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
+ (r'\:\w+[!?]?', String.Symbol),
+ (words(CRYSTAL_OPERATORS, prefix=r'\:'), String.Symbol),
+ (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
# This allows arbitrary text after '\ for simplicity
(r"'(\\\\|\\'|[^']|\\[^'\\]+)'", String.Char),
(r':"', String.Symbol, 'simple-sym'),
@@ -113,42 +113,42 @@ class CrystalLexer(ExtendedRegexLexer):
(end, ttype, '#pop'),
]
- # https://crystal-lang.org/docs/syntax_and_semantics/literals/string.html#percent-string-literals
+ # https://crystal-lang.org/docs/syntax_and_semantics/literals/string.html#percent-string-literals
for lbrace, rbrace, bracecc, name in \
('\\{', '\\}', '{}', 'cb'), \
('\\[', '\\]', '\\[\\]', 'sb'), \
('\\(', '\\)', '()', 'pa'), \
- ('<', '>', '<>', 'ab'), \
- ('\\|', '\\|', '\\|', 'pi'):
+ ('<', '>', '<>', 'ab'), \
+ ('\\|', '\\|', '\\|', 'pi'):
states[name+'-intp-string'] = [
- (r'\\' + lbrace, String.Other),
- ] + (lbrace != rbrace) * [
+ (r'\\' + lbrace, String.Other),
+ ] + (lbrace != rbrace) * [
(lbrace, String.Other, '#push'),
- ] + [
+ ] + [
(rbrace, String.Other, '#pop'),
include('string-intp-escaped'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
- states['strings'].append((r'%Q?' + lbrace, String.Other,
+ states['strings'].append((r'%Q?' + lbrace, String.Other,
name+'-intp-string'))
states[name+'-string'] = [
(r'\\[\\' + bracecc + ']', String.Other),
- ] + (lbrace != rbrace) * [
+ ] + (lbrace != rbrace) * [
(lbrace, String.Other, '#push'),
- ] + [
+ ] + [
(rbrace, String.Other, '#pop'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
- # https://crystal-lang.org/docs/syntax_and_semantics/literals/array.html#percent-array-literals
- states['strings'].append((r'%[qwi]' + lbrace, String.Other,
+ # https://crystal-lang.org/docs/syntax_and_semantics/literals/array.html#percent-array-literals
+ states['strings'].append((r'%[qwi]' + lbrace, String.Other,
name+'-string'))
states[name+'-regex'] = [
(r'\\[\\' + bracecc + ']', String.Regex),
- ] + (lbrace != rbrace) * [
+ ] + (lbrace != rbrace) * [
(lbrace, String.Regex, '#push'),
- ] + [
+ ] + [
(rbrace + '[imsx]*', String.Regex, '#pop'),
include('string-intp'),
(r'[\\#' + bracecc + ']', String.Regex),
@@ -164,16 +164,16 @@ class CrystalLexer(ExtendedRegexLexer):
(r'#.*?$', Comment.Single),
# keywords
(words('''
- abstract asm begin break case do else elsif end ensure extend if in
- include next of private protected require rescue return select self super
- then unless until when while with yield
+ abstract asm begin break case do else elsif end ensure extend if in
+ include next of private protected require rescue return select self super
+ then unless until when while with yield
'''.split(), suffix=r'\b'), Keyword),
- (words('''
- previous_def forall out uninitialized __DIR__ __FILE__ __LINE__
- __END_LINE__
- '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
- # https://crystal-lang.org/docs/syntax_and_semantics/is_a.html
- (r'\.(is_a\?|nil\?|responds_to\?|as\?|as\b)', Keyword.Pseudo),
+ (words('''
+ previous_def forall out uninitialized __DIR__ __FILE__ __LINE__
+ __END_LINE__
+ '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
+ # https://crystal-lang.org/docs/syntax_and_semantics/is_a.html
+ (r'\.(is_a\?|nil\?|responds_to\?|as\?|as\b)', Keyword.Pseudo),
(words(['true', 'false', 'nil'], suffix=r'\b'), Keyword.Constant),
# start of function, class and module names
(r'(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
@@ -181,23 +181,23 @@ class CrystalLexer(ExtendedRegexLexer):
(r'(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)',
bygroups(Keyword, Whitespace, Name.Namespace), 'funcname'),
(r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(annotation|class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)',
+ (r'(annotation|class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)',
bygroups(Keyword, Whitespace, Name.Namespace), 'classname'),
- # https://crystal-lang.org/api/toplevel.html
- (words('''
- instance_sizeof offsetof pointerof sizeof typeof
- '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
+ # https://crystal-lang.org/api/toplevel.html
+ (words('''
+ instance_sizeof offsetof pointerof sizeof typeof
+ '''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Keyword.Pseudo),
# macros
- (r'(?<!\.)(debugger\b|p!|pp!|record\b|spawn\b)', Name.Builtin.Pseudo),
+ (r'(?<!\.)(debugger\b|p!|pp!|record\b|spawn\b)', Name.Builtin.Pseudo),
# builtins
(words('''
- abort at_exit caller exit gets loop main p pp print printf puts
- raise rand read_line sleep spawn sprintf system
+ abort at_exit caller exit gets loop main p pp print printf puts
+ raise rand read_line sleep spawn sprintf system
'''.split(), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin),
- # https://crystal-lang.org/api/Object.html#macro-summary
- (r'(?<!\.)(((class_)?((getter|property)\b[!?]?|setter\b))|'
- r'(def_(clone|equals|equals_and_hash|hash)|delegate|forward_missing_to)\b)',
- Name.Builtin.Pseudo),
+ # https://crystal-lang.org/api/Object.html#macro-summary
+ (r'(?<!\.)(((class_)?((getter|property)\b[!?]?|setter\b))|'
+ r'(def_(clone|equals|equals_and_hash|hash)|delegate|forward_missing_to)\b)',
+ Name.Builtin.Pseudo),
# normal heredocs
(r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
heredoc_callback),
@@ -269,17 +269,17 @@ class CrystalLexer(ExtendedRegexLexer):
(r'\$-[0adFiIlpvw]', Name.Variable.Global),
(r'::', Operator),
include('strings'),
- # https://crystal-lang.org/reference/syntax_and_semantics/literals/char.html
+ # https://crystal-lang.org/reference/syntax_and_semantics/literals/char.html
(r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})|\S)'
+ r'(\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})|\S)'
r'(?!\w)',
String.Char),
- (r'[A-Z][A-Z_]+\b(?!::|\.)', Name.Constant),
+ (r'[A-Z][A-Z_]+\b(?!::|\.)', Name.Constant),
# macro expansion
(r'\{%', String.Interpol, 'in-macro-control'),
(r'\{\{', String.Interpol, 'in-macro-expr'),
- # annotations
- (r'(@\[)(\s*)([A-Z]\w*(::[A-Z]\w*)*)',
+ # annotations
+ (r'(@\[)(\s*)([A-Z]\w*(::[A-Z]\w*)*)',
bygroups(Operator, Whitespace, Name.Decorator), 'in-annot'),
# this is needed because Crystal attributes can look
# like keywords (class) or like this: ` ?!?
@@ -317,9 +317,9 @@ class CrystalLexer(ExtendedRegexLexer):
(r'#\{', String.Interpol, 'in-intp'),
],
'string-escaped': [
- # https://crystal-lang.org/reference/syntax_and_semantics/literals/string.html
- (r'\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})',
- String.Escape)
+ # https://crystal-lang.org/reference/syntax_and_semantics/literals/string.html
+ (r'\\([\\abefnrtv#"\']|[0-7]{1,3}|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|u\{[a-fA-F0-9 ]+\})',
+ String.Escape)
],
'string-intp-escaped': [
include('string-intp'),
@@ -349,7 +349,7 @@ class CrystalLexer(ExtendedRegexLexer):
'in-macro-control': [
(r'\{%', String.Interpol, '#push'),
(r'%\}', String.Interpol, '#pop'),
- (r'(for|verbatim)\b', Keyword),
+ (r'(for|verbatim)\b', Keyword),
include('root'),
],
'in-macro-expr': [
@@ -357,7 +357,7 @@ class CrystalLexer(ExtendedRegexLexer):
(r'\}\}', String.Interpol, '#pop'),
include('root'),
],
- 'in-annot': [
+ 'in-annot': [
(r'\[', Operator, '#push'),
(r'\]', Operator, '#pop'),
include('root'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/csound.py b/contrib/python/Pygments/py3/pygments/lexers/csound.py
index a871de8d72..c8fc4c815b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/csound.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/csound.py
@@ -4,7 +4,7 @@
Lexers for Csound languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import re
from pygments.lexer import RegexLexer, bygroups, default, include, using, words
from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
String, Text, Whitespace
-from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES, REMOVED_OPCODES
+from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES, REMOVED_OPCODES
from pygments.lexers.html import HtmlLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.scripting import LuaLexer
@@ -34,7 +34,7 @@ class CsoundLexer(RegexLexer):
'preprocessor directives': [
(r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
- (r'#includestr', Comment.Preproc, 'includestr directive'),
+ (r'#includestr', Comment.Preproc, 'includestr directive'),
(r'#include', Comment.Preproc, 'include directive'),
(r'#[ \t]*define', Comment.Preproc, 'define directive'),
(r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
@@ -44,10 +44,10 @@ class CsoundLexer(RegexLexer):
include('whitespace'),
(r'([^ \t]).*?\1', String, '#pop')
],
- 'includestr directive': [
- include('whitespace'),
- (r'"', String, ('#pop', 'quoted string'))
- ],
+ 'includestr directive': [
+ include('whitespace'),
+ (r'"', String, ('#pop', 'quoted string'))
+ ],
'define directive': [
(r'\n', Whitespace),
@@ -118,13 +118,13 @@ class CsoundLexer(RegexLexer):
(r'\d+', Number.Integer)
],
- 'quoted string': [
- (r'"', String, '#pop'),
- (r'[^"$]+', String),
- include('macro uses'),
- (r'[$]', String)
- ],
-
+ 'quoted string': [
+ (r'"', String, '#pop'),
+ (r'[^"$]+', String),
+ include('macro uses'),
+ (r'[$]', String)
+ ],
+
'braced string': [
# Do nothing. This must be defined in subclasses.
]
@@ -133,7 +133,7 @@ class CsoundLexer(RegexLexer):
class CsoundScoreLexer(CsoundLexer):
"""
- For `Csound <https://csound.com>`_ scores.
+ For `Csound <https://csound.com>`_ scores.
.. versionadded:: 2.1
"""
@@ -148,14 +148,14 @@ class CsoundScoreLexer(CsoundLexer):
include('whitespace and macro uses'),
include('preprocessor directives'),
- (r'[aBbCdefiqstvxy]', Keyword),
+ (r'[aBbCdefiqstvxy]', Keyword),
# There is also a w statement that is generated internally and should not be
# used; see https://github.com/csound/csound/issues/750.
(r'z', Keyword.Constant),
# z is a constant equal to 800,000,000,000. 800 billion seconds is about
# 25,367.8 years. See also
- # https://csound.com/docs/manual/ScoreTop.html and
+ # https://csound.com/docs/manual/ScoreTop.html and
# https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c.
(r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)),
@@ -188,8 +188,8 @@ class CsoundScoreLexer(CsoundLexer):
include('root')
],
- # Braced strings are not allowed in Csound scores, but this is needed because the
- # superclass includes it.
+ # Braced strings are not allowed in Csound scores, but this is needed because the
+ # superclass includes it.
'braced string': [
(r'\}\}', String, '#pop'),
(r'[^}]|\}(?!\})', String)
@@ -199,7 +199,7 @@ class CsoundScoreLexer(CsoundLexer):
class CsoundOrchestraLexer(CsoundLexer):
"""
- For `Csound <https://csound.com>`_ orchestras.
+ For `Csound <https://csound.com>`_ orchestras.
.. versionadded:: 2.1
"""
@@ -216,25 +216,25 @@ class CsoundOrchestraLexer(CsoundLexer):
yield match.start(), Name.Function, opcode
def name_callback(lexer, match):
- type_annotation_token = Keyword.Type
-
+ type_annotation_token = Keyword.Type
+
name = match.group(1)
- if name in OPCODES or name in DEPRECATED_OPCODES or name in REMOVED_OPCODES:
+ if name in OPCODES or name in DEPRECATED_OPCODES or name in REMOVED_OPCODES:
yield match.start(), Name.Builtin, name
elif name in lexer.user_defined_opcodes:
yield match.start(), Name.Function, name
else:
- type_annotation_token = Name
- name_match = re.search(r'^(g?[afikSw])(\w+)', name)
- if name_match:
- yield name_match.start(1), Keyword.Type, name_match.group(1)
- yield name_match.start(2), Name, name_match.group(2)
+ type_annotation_token = Name
+ name_match = re.search(r'^(g?[afikSw])(\w+)', name)
+ if name_match:
+ yield name_match.start(1), Keyword.Type, name_match.group(1)
+ yield name_match.start(2), Name, name_match.group(2)
else:
yield match.start(), Name, name
- if match.group(2):
- yield match.start(2), Punctuation, match.group(2)
- yield match.start(3), type_annotation_token, match.group(3)
+ if match.group(2):
+ yield match.start(2), Punctuation, match.group(2)
+ yield match.start(3), type_annotation_token, match.group(3)
tokens = {
'root': [
@@ -329,24 +329,24 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
],
# Format specifiers are highlighted in all strings, even though only
- # fprintks https://csound.com/docs/manual/fprintks.html
- # fprints https://csound.com/docs/manual/fprints.html
- # printf/printf_i https://csound.com/docs/manual/printf.html
- # printks https://csound.com/docs/manual/printks.html
- # prints https://csound.com/docs/manual/prints.html
- # sprintf https://csound.com/docs/manual/sprintf.html
- # sprintfk https://csound.com/docs/manual/sprintfk.html
- # work with strings that contain format specifiers. In addition, these opcodes’
- # handling of format specifiers is inconsistent:
- # - fprintks and fprints accept %a and %A specifiers, and accept %s specifiers
- # starting in Csound 6.15.0.
- # - printks and prints accept %a and %A specifiers, but don’t accept %s
- # specifiers.
- # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A specifiers,
- # but accept %s specifiers.
+ # fprintks https://csound.com/docs/manual/fprintks.html
+ # fprints https://csound.com/docs/manual/fprints.html
+ # printf/printf_i https://csound.com/docs/manual/printf.html
+ # printks https://csound.com/docs/manual/printks.html
+ # prints https://csound.com/docs/manual/prints.html
+ # sprintf https://csound.com/docs/manual/sprintf.html
+ # sprintfk https://csound.com/docs/manual/sprintfk.html
+ # work with strings that contain format specifiers. In addition, these opcodes’
+ # handling of format specifiers is inconsistent:
+ # - fprintks and fprints accept %a and %A specifiers, and accept %s specifiers
+ # starting in Csound 6.15.0.
+ # - printks and prints accept %a and %A specifiers, but don’t accept %s
+ # specifiers.
+ # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A specifiers,
+ # but accept %s specifiers.
# See https://github.com/csound/csound/issues/747 for more information.
'format specifiers': [
- (r'%[#0\- +]*\d*(?:\.\d+)?[AE-GXac-giosux]', String.Interpol),
+ (r'%[#0\- +]*\d*(?:\.\d+)?[AE-GXac-giosux]', String.Interpol),
(r'%%', String.Escape)
],
@@ -374,7 +374,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'Csound score opcode': [
include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
+ (r'"', String, 'quoted string'),
(r'\{\{', String, 'Csound score'),
(r'\n', Whitespace, '#pop')
],
@@ -385,7 +385,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'Python opcode': [
include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
+ (r'"', String, 'quoted string'),
(r'\{\{', String, 'Python'),
(r'\n', Whitespace, '#pop')
],
@@ -396,7 +396,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'Lua opcode': [
include('whitespace and macro uses'),
- (r'"', String, 'quoted string'),
+ (r'"', String, 'quoted string'),
(r'\{\{', String, 'Lua'),
(r'\n', Whitespace, '#pop')
],
@@ -409,7 +409,7 @@ class CsoundOrchestraLexer(CsoundLexer):
class CsoundDocumentLexer(RegexLexer):
"""
- For `Csound <https://csound.com>`_ documents.
+ For `Csound <https://csound.com>`_ documents.
.. versionadded:: 2.1
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/css.py b/contrib/python/Pygments/py3/pygments/lexers/css.py
index 4b284cc8fa..7820f6e083 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/css.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/css.py
@@ -4,7 +4,7 @@
Lexers for CSS and related stylesheet formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -288,8 +288,8 @@ class CssLexer(RegexLexer):
(r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'),
(r'[\w-]+', Name.Tag),
(r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
],
'atrule': [
(r'\{', Punctuation, 'atcontent'),
@@ -310,7 +310,7 @@ class CssLexer(RegexLexer):
(r'('+r'|'.join(_css_properties)+r')(\s*)(\:)',
bygroups(Keyword, Whitespace, Punctuation), 'value-start'),
(r'([-]+[a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name.Variable, Whitespace, Punctuation),
- 'value-start'),
+ 'value-start'),
(r'([a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name, Whitespace, Punctuation),
'value-start'),
@@ -336,15 +336,15 @@ class CssLexer(RegexLexer):
(r'[~^*!%&<>|+=@:./?-]+', Operator),
(r'[\[\](),]+', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_][\w-]*', Name),
(r';', Punctuation, '#pop'),
(r'\}', Punctuation, '#pop:2'),
],
'function-start': [
(r'\s+', Whitespace),
- (r'[-]+([\w+]+[-]*)+', Name.Variable),
+ (r'[-]+([\w+]+[-]*)+', Name.Variable),
include('urls'),
(words(_vendor_prefixes,), Keyword.Pseudo),
(words(_keyword_values, suffix=r'\b'), Keyword.Constant),
@@ -360,9 +360,9 @@ class CssLexer(RegexLexer):
(r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
(r'[*+/-]', Operator),
- (r',', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r',', Punctuation),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_-]\w*', Name),
(r'\)', Punctuation, '#pop'),
],
@@ -398,7 +398,7 @@ common_sass_tokens = {
'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
'capitalize', 'center-left', 'center-right', 'center', 'circle',
'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
- 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
@@ -613,7 +613,7 @@ class SassLexer(ExtendedRegexLexer):
(r"\*/", Comment, '#pop'),
],
}
- for group, common in common_sass_tokens.items():
+ for group, common in common_sass_tokens.items():
tokens[group] = copy.copy(common)
tokens['value'].append((r'\n', Whitespace, 'root'))
tokens['selector'].append((r'\n', Whitespace, 'root'))
@@ -663,7 +663,7 @@ class ScssLexer(RegexLexer):
(r"\*/", Comment, '#pop'),
],
}
- for group, common in common_sass_tokens.items():
+ for group, common in common_sass_tokens.items():
tokens[group] = copy.copy(common)
tokens['value'].extend([(r'\n', Whitespace), (r'[;{}]', Punctuation, '#pop')])
tokens['selector'].extend([(r'\n', Whitespace), (r'[;{}]', Punctuation, '#pop')])
@@ -688,7 +688,7 @@ class LessCssLexer(CssLexer):
],
'content': [
(r'\{', Punctuation, '#push'),
- (r'//.*\n', Comment.Single),
+ (r'//.*\n', Comment.Single),
inherit,
],
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/d.py b/contrib/python/Pygments/py3/pygments/lexers/d.py
index 7844de550f..8e8397e6cf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/d.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/d.py
@@ -4,7 +4,7 @@
Lexers for D languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,17 +46,17 @@ class DLexer(RegexLexer):
'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
- 'template', 'this', 'throw', 'try', 'typeid', 'typeof',
+ 'template', 'this', 'throw', 'try', 'typeid', 'typeof',
'union', 'unittest', 'version', 'volatile', 'while', 'with',
'__gshared', '__traits', '__vector', '__parameters'),
suffix=r'\b'),
Keyword),
(words((
- # Removed in 2.072
- 'typedef', ),
- suffix=r'\b'),
- Keyword.Removed),
- (words((
+ # Removed in 2.072
+ 'typedef', ),
+ suffix=r'\b'),
+ Keyword.Removed),
+ (words((
'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
@@ -64,9 +64,9 @@ class DLexer(RegexLexer):
Keyword.Type),
(r'(false|true|null)\b', Keyword.Constant),
(words((
- '__FILE__', '__FILE_FULL_PATH__', '__MODULE__', '__LINE__', '__FUNCTION__',
- '__PRETTY_FUNCTION__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__',
- '__VENDOR__', '__VERSION__'), suffix=r'\b'),
+ '__FILE__', '__FILE_FULL_PATH__', '__MODULE__', '__LINE__', '__FUNCTION__',
+ '__PRETTY_FUNCTION__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__',
+ '__VENDOR__', '__VERSION__'), suffix=r'\b'),
Keyword.Pseudo),
(r'macro\b', Keyword.Reserved),
(r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
@@ -97,7 +97,7 @@ class DLexer(RegexLexer):
# -- AlternateWysiwygString
(r'`[^`]*`[cwd]?', String),
# -- DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"[cwd]?', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"[cwd]?', String),
# -- EscapeSequence
(r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
@@ -229,7 +229,7 @@ class CrocLexer(RegexLexer):
(r'@`(``|[^`])*`', String),
(r"@'(''|[^'])*'", String),
# -- DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Tokens
(r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dalvik.py b/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
index 6d9fb5464e..cd6bdca578 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
@@ -4,7 +4,7 @@
Pygments lexers for Dalvik VM-related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/data.py b/contrib/python/Pygments/py3/pygments/lexers/data.py
index c702d42093..0ce35221e8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/data.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/data.py
@@ -4,7 +4,7 @@
Lexers for data file format.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ class YamlLexerContext(LexerContext):
"""Indentation context for the YAML lexer."""
def __init__(self, *args, **kwds):
- super().__init__(*args, **kwds)
+ super().__init__(*args, **kwds)
self.indent_stack = []
self.indent = -1
self.next_indent = 0
@@ -230,7 +230,7 @@ class YamlLexer(ExtendedRegexLexer):
# whitespaces separating tokens
(r'[ ]+', Whitespace),
# key with colon
- (r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
+ (r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
# tags, anchors and aliases,
include('descriptors'),
@@ -430,10 +430,10 @@ class YamlLexer(ExtendedRegexLexer):
def get_tokens_unprocessed(self, text=None, context=None):
if context is None:
context = YamlLexerContext(text, 0)
- return super().get_tokens_unprocessed(text, context)
+ return super().get_tokens_unprocessed(text, context)
-class JsonLexer(Lexer):
+class JsonLexer(Lexer):
"""
For JSON data structures.
@@ -441,154 +441,154 @@ class JsonLexer(Lexer):
"""
name = 'JSON'
- aliases = ['json', 'json-object']
- filenames = ['*.json', 'Pipfile.lock']
- mimetypes = ['application/json', 'application/json-object']
-
- # No validation of integers, floats, or constants is done.
- # As long as the characters are members of the following
- # sets, the token will be considered valid. For example,
- #
- # "--1--" is parsed as an integer
- # "1...eee" is parsed as a float
- # "trustful" is parsed as a constant
- #
- integers = set('-0123456789')
- floats = set('.eE+')
- constants = set('truefalsenull') # true|false|null
- hexadecimals = set('0123456789abcdefABCDEF')
- punctuations = set('{}[],')
- whitespaces = {'\u0020', '\u000a', '\u000d', '\u0009'}
-
- def get_tokens_unprocessed(self, text):
- """Parse JSON data."""
-
- in_string = False
- in_escape = False
- in_unicode_escape = 0
- in_whitespace = False
- in_constant = False
- in_number = False
- in_float = False
- in_punctuation = False
-
- start = 0
-
- # The queue is used to store data that may need to be tokenized
- # differently based on what follows. In particular, JSON object
- # keys are tokenized differently than string values, but cannot
- # be distinguished until punctuation is encountered outside the
- # string.
- #
- # A ":" character after the string indicates that the string is
- # an object key; any other character indicates the string is a
- # regular string value.
- #
- # The queue holds tuples that contain the following data:
- #
- # (start_index, token_type, text)
- #
- # By default the token type of text in double quotes is
- # String.Double. The token type will be replaced if a colon
- # is encountered after the string closes.
- #
- queue = []
-
- for stop, character in enumerate(text):
- if in_string:
- if in_unicode_escape:
- if character in self.hexadecimals:
- in_unicode_escape -= 1
- if not in_unicode_escape:
- in_escape = False
- else:
- in_unicode_escape = 0
- in_escape = False
-
- elif in_escape:
- if character == 'u':
- in_unicode_escape = 4
- else:
- in_escape = False
-
- elif character == '\\':
- in_escape = True
-
- elif character == '"':
- queue.append((start, String.Double, text[start:stop + 1]))
- in_string = False
- in_escape = False
- in_unicode_escape = 0
-
- continue
-
- elif in_whitespace:
- if character in self.whitespaces:
- continue
-
- if queue:
+ aliases = ['json', 'json-object']
+ filenames = ['*.json', 'Pipfile.lock']
+ mimetypes = ['application/json', 'application/json-object']
+
+ # No validation of integers, floats, or constants is done.
+ # As long as the characters are members of the following
+ # sets, the token will be considered valid. For example,
+ #
+ # "--1--" is parsed as an integer
+ # "1...eee" is parsed as a float
+ # "trustful" is parsed as a constant
+ #
+ integers = set('-0123456789')
+ floats = set('.eE+')
+ constants = set('truefalsenull') # true|false|null
+ hexadecimals = set('0123456789abcdefABCDEF')
+ punctuations = set('{}[],')
+ whitespaces = {'\u0020', '\u000a', '\u000d', '\u0009'}
+
+ def get_tokens_unprocessed(self, text):
+ """Parse JSON data."""
+
+ in_string = False
+ in_escape = False
+ in_unicode_escape = 0
+ in_whitespace = False
+ in_constant = False
+ in_number = False
+ in_float = False
+ in_punctuation = False
+
+ start = 0
+
+ # The queue is used to store data that may need to be tokenized
+ # differently based on what follows. In particular, JSON object
+ # keys are tokenized differently than string values, but cannot
+ # be distinguished until punctuation is encountered outside the
+ # string.
+ #
+ # A ":" character after the string indicates that the string is
+ # an object key; any other character indicates the string is a
+ # regular string value.
+ #
+ # The queue holds tuples that contain the following data:
+ #
+ # (start_index, token_type, text)
+ #
+ # By default the token type of text in double quotes is
+ # String.Double. The token type will be replaced if a colon
+ # is encountered after the string closes.
+ #
+ queue = []
+
+ for stop, character in enumerate(text):
+ if in_string:
+ if in_unicode_escape:
+ if character in self.hexadecimals:
+ in_unicode_escape -= 1
+ if not in_unicode_escape:
+ in_escape = False
+ else:
+ in_unicode_escape = 0
+ in_escape = False
+
+ elif in_escape:
+ if character == 'u':
+ in_unicode_escape = 4
+ else:
+ in_escape = False
+
+ elif character == '\\':
+ in_escape = True
+
+ elif character == '"':
+ queue.append((start, String.Double, text[start:stop + 1]))
+ in_string = False
+ in_escape = False
+ in_unicode_escape = 0
+
+ continue
+
+ elif in_whitespace:
+ if character in self.whitespaces:
+ continue
+
+ if queue:
queue.append((start, Whitespace, text[start:stop]))
- else:
+ else:
yield start, Whitespace, text[start:stop]
- in_whitespace = False
- # Fall through so the new character can be evaluated.
-
- elif in_constant:
- if character in self.constants:
- continue
-
- yield start, Keyword.Constant, text[start:stop]
- in_constant = False
- # Fall through so the new character can be evaluated.
-
- elif in_number:
- if character in self.integers:
- continue
- elif character in self.floats:
- in_float = True
- continue
-
- if in_float:
- yield start, Number.Float, text[start:stop]
- else:
- yield start, Number.Integer, text[start:stop]
- in_number = False
- in_float = False
- # Fall through so the new character can be evaluated.
-
- elif in_punctuation:
- if character in self.punctuations:
- continue
-
- yield start, Punctuation, text[start:stop]
- in_punctuation = False
- # Fall through so the new character can be evaluated.
-
- start = stop
-
- if character == '"':
- in_string = True
-
- elif character in self.whitespaces:
- in_whitespace = True
-
- elif character in {'f', 'n', 't'}: # The first letters of true|false|null
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- in_constant = True
-
- elif character in self.integers:
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- in_number = True
-
- elif character == ':':
- # Yield from the queue. Replace string token types.
- for _start, _token, _text in queue:
+ in_whitespace = False
+ # Fall through so the new character can be evaluated.
+
+ elif in_constant:
+ if character in self.constants:
+ continue
+
+ yield start, Keyword.Constant, text[start:stop]
+ in_constant = False
+ # Fall through so the new character can be evaluated.
+
+ elif in_number:
+ if character in self.integers:
+ continue
+ elif character in self.floats:
+ in_float = True
+ continue
+
+ if in_float:
+ yield start, Number.Float, text[start:stop]
+ else:
+ yield start, Number.Integer, text[start:stop]
+ in_number = False
+ in_float = False
+ # Fall through so the new character can be evaluated.
+
+ elif in_punctuation:
+ if character in self.punctuations:
+ continue
+
+ yield start, Punctuation, text[start:stop]
+ in_punctuation = False
+ # Fall through so the new character can be evaluated.
+
+ start = stop
+
+ if character == '"':
+ in_string = True
+
+ elif character in self.whitespaces:
+ in_whitespace = True
+
+ elif character in {'f', 'n', 't'}: # The first letters of true|false|null
+ # Exhaust the queue. Accept the existing token types.
+ yield from queue
+ queue.clear()
+
+ in_constant = True
+
+ elif character in self.integers:
+ # Exhaust the queue. Accept the existing token types.
+ yield from queue
+ queue.clear()
+
+ in_number = True
+
+ elif character == ':':
+ # Yield from the queue. Replace string token types.
+ for _start, _token, _text in queue:
# There can be only two types of tokens before a ':':
# Whitespace, or a quoted string. If it's a quoted string
# we emit Name.Tag, otherwise, we yield the whitespace
@@ -596,65 +596,65 @@ class JsonLexer(Lexer):
# allows for things like '"foo" "bar": "baz"' but we're not
# a validating JSON lexer so it's acceptable
if _token is Whitespace:
- yield _start, _token, _text
- elif _token is String.Double:
- yield _start, Name.Tag, _text
- else:
- yield _start, Error, _text
- queue.clear()
-
- in_punctuation = True
-
- elif character in self.punctuations:
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- in_punctuation = True
-
- else:
- # Exhaust the queue. Accept the existing token types.
- yield from queue
- queue.clear()
-
- yield start, Error, character
-
- # Yield any remaining text.
- yield from queue
- if in_string:
- yield start, Error, text[start:]
- elif in_float:
- yield start, Number.Float, text[start:]
- elif in_number:
- yield start, Number.Integer, text[start:]
- elif in_constant:
- yield start, Keyword.Constant, text[start:]
- elif in_whitespace:
+ yield _start, _token, _text
+ elif _token is String.Double:
+ yield _start, Name.Tag, _text
+ else:
+ yield _start, Error, _text
+ queue.clear()
+
+ in_punctuation = True
+
+ elif character in self.punctuations:
+ # Exhaust the queue. Accept the existing token types.
+ yield from queue
+ queue.clear()
+
+ in_punctuation = True
+
+ else:
+ # Exhaust the queue. Accept the existing token types.
+ yield from queue
+ queue.clear()
+
+ yield start, Error, character
+
+ # Yield any remaining text.
+ yield from queue
+ if in_string:
+ yield start, Error, text[start:]
+ elif in_float:
+ yield start, Number.Float, text[start:]
+ elif in_number:
+ yield start, Number.Integer, text[start:]
+ elif in_constant:
+ yield start, Keyword.Constant, text[start:]
+ elif in_whitespace:
yield start, Whitespace, text[start:]
- elif in_punctuation:
- yield start, Punctuation, text[start:]
-
-
+ elif in_punctuation:
+ yield start, Punctuation, text[start:]
+
+
class JsonBareObjectLexer(JsonLexer):
"""
For JSON data structures (with missing object curly braces).
.. versionadded:: 2.2
-
- .. deprecated:: 2.8.0
-
- Behaves the same as `JsonLexer` now.
+
+ .. deprecated:: 2.8.0
+
+ Behaves the same as `JsonLexer` now.
"""
name = 'JSONBareObject'
- aliases = []
+ aliases = []
filenames = []
- mimetypes = []
+ mimetypes = []
class JsonLdLexer(JsonLexer):
"""
- For `JSON-LD <https://json-ld.org/>`_ linked data.
+ For `JSON-LD <https://json-ld.org/>`_ linked data.
.. versionadded:: 2.0
"""
@@ -664,38 +664,38 @@ class JsonLdLexer(JsonLexer):
filenames = ['*.jsonld']
mimetypes = ['application/ld+json']
- json_ld_keywords = {
- '"@%s"' % keyword
- for keyword in (
- 'base',
- 'container',
- 'context',
- 'direction',
- 'graph',
- 'id',
- 'import',
- 'included',
- 'index',
- 'json',
- 'language',
- 'list',
- 'nest',
- 'none',
- 'prefix',
- 'propagate',
- 'protected',
- 'reverse',
- 'set',
- 'type',
- 'value',
- 'version',
- 'vocab',
- )
+ json_ld_keywords = {
+ '"@%s"' % keyword
+ for keyword in (
+ 'base',
+ 'container',
+ 'context',
+ 'direction',
+ 'graph',
+ 'id',
+ 'import',
+ 'included',
+ 'index',
+ 'json',
+ 'language',
+ 'list',
+ 'nest',
+ 'none',
+ 'prefix',
+ 'propagate',
+ 'protected',
+ 'reverse',
+ 'set',
+ 'type',
+ 'value',
+ 'version',
+ 'vocab',
+ )
}
-
- def get_tokens_unprocessed(self, text):
- for start, token, value in super().get_tokens_unprocessed(text):
- if token is Name.Tag and value in self.json_ld_keywords:
- yield start, Name.Decorator, value
- else:
- yield start, token, value
+
+ def get_tokens_unprocessed(self, text):
+ for start, token, value in super().get_tokens_unprocessed(text):
+ if token is Name.Tag and value in self.json_ld_keywords:
+ yield start, Name.Decorator, value
+ else:
+ yield start, token, value
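Editorial note, not part of the patch: the queue-based re-tagging above is what
makes object keys and plain string values come out differently. A minimal
sketch of the effect, assuming Pygments >= 2.8 (where this hand-written
JsonLexer lives):

    from pygments.lexers import JsonLexer
    from pygments.token import Name, String

    # '"key"' sits in the queue as String.Double until the ':' is seen, at
    # which point it is re-emitted as Name.Tag; '"value"' never meets a ':'
    # and stays String.Double.
    tokens = list(JsonLexer().get_tokens('{"key": "value"}'))
    assert (Name.Tag, '"key"') in tokens
    assert (String.Double, '"value"') in tokens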
diff --git a/contrib/python/Pygments/py3/pygments/lexers/devicetree.py b/contrib/python/Pygments/py3/pygments/lexers/devicetree.py
index cb25a330fd..05d1b8d239 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/devicetree.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/devicetree.py
@@ -1,108 +1,108 @@
-"""
- pygments.lexers.devicetree
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Devicetree language.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, default, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
+"""
+ pygments.lexers.devicetree
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Devicetree language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, default, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
Punctuation, String, Text, Whitespace
-
-__all__ = ['DevicetreeLexer']
-
-
-class DevicetreeLexer(RegexLexer):
- """
- Lexer for `Devicetree <https://www.devicetree.org/>`_ files.
-
- .. versionadded:: 2.7
- """
-
- name = 'Devicetree'
- aliases = ['devicetree', 'dts']
- filenames = ['*.dts', '*.dtsi']
- mimetypes = ['text/x-c']
-
- #: optional Whitespace or /*...*/ style comment
- _ws = r'\s*(?:/[*][^*/]*?[*]/\s*)*'
-
- tokens = {
- 'macro': [
- # Include preprocessor directives (C style):
- (r'(#include)(' + _ws + r')([^\n]+)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
- # Define preprocessor directives (C style):
- (r'(#define)(' + _ws + r')([^\n]+)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc)),
- # devicetree style with file:
- (r'(/[^*/{]+/)(' + _ws + r')("[^\n{]+")',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
- # devicetree style with property:
- (r'(/[^*/{]+/)(' + _ws + r')([^\n;{]*)([;]?)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc, Punctuation)),
- ],
- 'whitespace': [
+
+__all__ = ['DevicetreeLexer']
+
+
+class DevicetreeLexer(RegexLexer):
+ """
+ Lexer for `Devicetree <https://www.devicetree.org/>`_ files.
+
+ .. versionadded:: 2.7
+ """
+
+ name = 'Devicetree'
+ aliases = ['devicetree', 'dts']
+ filenames = ['*.dts', '*.dtsi']
+ mimetypes = ['text/x-c']
+
+ #: optional Whitespace or /*...*/ style comment
+ _ws = r'\s*(?:/[*][^*/]*?[*]/\s*)*'
+
+ tokens = {
+ 'macro': [
+ # Include preprocessor directives (C style):
+ (r'(#include)(' + _ws + r')([^\n]+)',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
+ # Define preprocessor directives (C style):
+ (r'(#define)(' + _ws + r')([^\n]+)',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc)),
+ # devicetree style with file:
+ (r'(/[^*/{]+/)(' + _ws + r')("[^\n{]+")',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
+ # devicetree style with property:
+ (r'(/[^*/{]+/)(' + _ws + r')([^\n;{]*)([;]?)',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc, Punctuation)),
+ ],
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- (r'\\\n', Text), # line continuation
- (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
-            # Open until EOF, so no ending delimiter
- (r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
- ],
- 'statements': [
- (r'(L?)(")', bygroups(String.Affix, String), 'string'),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation), '#pop'),
- (words(('compatible', 'model', 'phandle', 'status', '#address-cells',
- '#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges',
- 'device_type', 'name'), suffix=r'\b'), Keyword.Reserved),
- (r'([~!%^&*+=|?:<>/#-])', Operator),
- (r'[()\[\]{},.]', Punctuation),
- (r'[a-zA-Z_][\w-]*(?=(?:\s*,\s*[a-zA-Z_][\w-]*|(?:' + _ws + r'))*\s*[=;])',
- Name),
- (r'[a-zA-Z_]\w*', Name.Attribute),
- ],
- 'root': [
- include('whitespace'),
- include('macro'),
-
- # Nodes
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+            # Open until EOF, so no ending delimiter
+ (r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'(L?)(")', bygroups(String.Affix, String), 'string'),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation), '#pop'),
+ (words(('compatible', 'model', 'phandle', 'status', '#address-cells',
+ '#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges',
+ 'device_type', 'name'), suffix=r'\b'), Keyword.Reserved),
+ (r'([~!%^&*+=|?:<>/#-])', Operator),
+ (r'[()\[\]{},.]', Punctuation),
+ (r'[a-zA-Z_][\w-]*(?=(?:\s*,\s*[a-zA-Z_][\w-]*|(?:' + _ws + r'))*\s*[=;])',
+ Name),
+ (r'[a-zA-Z_]\w*', Name.Attribute),
+ ],
+ 'root': [
+ include('whitespace'),
+ include('macro'),
+
+ # Nodes
(r'([^/*@\s&]+|/)(@?)((?:0x)?[0-9a-fA-F,]*)(' + _ws + r')(\{)',
- bygroups(Name.Function, Operator, Number.Integer,
- Comment.Multiline, Punctuation), 'node'),
-
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation, '#pop'),
- ],
- 'node': [
- include('whitespace'),
- include('macro'),
-
+ bygroups(Name.Function, Operator, Number.Integer,
+ Comment.Multiline, Punctuation), 'node'),
+
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation, '#pop'),
+ ],
+ 'node': [
+ include('whitespace'),
+ include('macro'),
+
(r'([^/*@\s&]+|/)(@?)((?:0x)?[0-9a-fA-F,]*)(' + _ws + r')(\{)',
- bygroups(Name.Function, Operator, Number.Integer,
- Comment.Multiline, Punctuation), '#push'),
-
- include('statements'),
-
- (r'\};', Punctuation, '#pop'),
- (';', Punctuation),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- }
+ bygroups(Name.Function, Operator, Number.Integer,
+ Comment.Multiline, Punctuation), '#push'),
+
+ include('statements'),
+
+ (r'\};', Punctuation, '#pop'),
+ (';', Punctuation),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ }
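Editorial note, not part of the patch: the 'node' state above pushes itself
('#push') for nested nodes and pops on '};', while the label rule in
'statements' tags things like 'uart0:' as Name.Label. A small sketch of the
lexer in use, assuming it is registered under its usual 'dts' alias:

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Name

    dts = '/ {\n    uart0: serial@1000 {\n        status = "okay";\n    };\n};\n'
    lexer = get_lexer_by_name('dts')
    # Collect label tokens; expected to include 'uart0'.
    labels = [value for token, value in lexer.get_tokens(dts) if token is Name.Label]
    print(labels)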
diff --git a/contrib/python/Pygments/py3/pygments/lexers/diff.py b/contrib/python/Pygments/py3/pygments/lexers/diff.py
index a694bd68e6..43a76a16cb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/diff.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/diff.py
@@ -4,7 +4,7 @@
Lexers for diff/patch formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -113,9 +113,9 @@ class WDiffLexer(RegexLexer):
Note that:
- * It only works with normal output (without options like ``-l``).
- * If the target files contain "[-", "-]", "{+", or "+}",
-      especially if they are unbalanced, the lexer will get confused.
+ * It only works with normal output (without options like ``-l``).
+ * If the target files contain "[-", "-]", "{+", or "+}",
+      especially if they are unbalanced, the lexer will get confused.
.. versionadded:: 2.2
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dotnet.py b/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
index c04d2a0a92..29f6015152 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
@@ -4,7 +4,7 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -13,7 +13,7 @@ from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
using, this, default, words
from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
Name, String, Number, Literal, Other, Whitespace
-from pygments.util import get_choice_opt
+from pygments.util import get_choice_opt
from pygments import unistring as uni
from pygments.lexers.html import XmlLexer
@@ -70,7 +70,7 @@ class CSharpLexer(RegexLexer):
tokens = {}
token_variants = True
- for levelname, cs_ident in levels.items():
+ for levelname, cs_ident in levels.items():
tokens[levelname] = {
'root': [
# method names
@@ -88,7 +88,7 @@ class CSharpLexer(RegexLexer):
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
- (r'\$?"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
+ (r'\$?"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -185,7 +185,7 @@ class NemerleLexer(RegexLexer):
tokens = {}
token_variants = True
- for levelname, cs_ident in levels.items():
+ for levelname, cs_ident in levels.items():
tokens[levelname] = {
'root': [
# method names
@@ -218,7 +218,7 @@ class NemerleLexer(RegexLexer):
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
+ (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
@@ -304,17 +304,17 @@ class NemerleLexer(RegexLexer):
RegexLexer.__init__(self, **options)
- def analyse_text(text):
- """Nemerle is quite similar to Python, but @if is relatively uncommon
- elsewhere."""
- result = 0
-
- if '@if' in text:
- result += 0.1
-
- return result
-
-
+ def analyse_text(text):
+ """Nemerle is quite similar to Python, but @if is relatively uncommon
+ elsewhere."""
+ result = 0
+
+ if '@if' in text:
+ result += 0.1
+
+ return result
+
+
class BooLexer(RegexLexer):
"""
For `Boo <http://boo.codehaus.org/>`_ source code.
@@ -334,8 +334,8 @@ class BooLexer(RegexLexer):
(r'(\\)(\n)', bygroups(Text, Whitespace)),
(r'\\', Text),
(r'(in|is|and|or|not)\b', Operator.Word),
- (r'/(\\\\|\\[^\\]|[^/\\\s])/', String.Regex),
- (r'@/(\\\\|\\[^\\]|[^/\\])*/', String.Regex),
+ (r'/(\\\\|\\[^\\]|[^/\\\s])/', String.Regex),
+ (r'@/(\\\\|\\[^\\]|[^/\\])*/', String.Regex),
(r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
(r'(as|abstract|callable|constructor|destructor|do|import|'
r'enum|event|final|get|interface|internal|of|override|'
@@ -354,8 +354,8 @@ class BooLexer(RegexLexer):
r'rawArrayIndexing|required|typeof|unchecked|using|'
r'yieldAll|zip)\b', Name.Builtin),
(r'"""(\\\\|\\"|.*?)"""', String.Double),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_]\w*', Name),
(r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
(r'[0-9][0-9.]*(ms?|d|h|s)', Number),
@@ -527,7 +527,7 @@ class CSharpAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super().__init__(CSharpLexer, GenericAspxLexer, **options)
+ super().__init__(CSharpLexer, GenericAspxLexer, **options)
def analyse_text(text):
if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
@@ -547,7 +547,7 @@ class VbNetAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super().__init__(VbNetLexer, GenericAspxLexer, **options)
+ super().__init__(VbNetLexer, GenericAspxLexer, **options)
def analyse_text(text):
if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
@@ -705,14 +705,14 @@ class FSharpLexer(RegexLexer):
(r'"', String),
],
}
-
- def analyse_text(text):
- """F# doesn't have that many unique features -- |> and <| are weak
- indicators."""
- result = 0
- if '|>' in text:
- result += 0.05
- if '<|' in text:
- result += 0.05
-
- return result
+
+ def analyse_text(text):
+ """F# doesn't have that many unique features -- |> and <| are weak
+ indicators."""
+ result = 0
+ if '|>' in text:
+ result += 0.05
+ if '<|' in text:
+ result += 0.05
+
+ return result
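Editorial note, not part of the patch: analyse_text is what guess_lexer uses to
score candidate lexers, so the weak '|>' and '<|' indicators restored above
only ever contribute 0.05 each. A quick sketch of that scoring:

    from pygments.lexers.dotnet import FSharpLexer

    score = FSharpLexer.analyse_text('xs |> List.map ((+) 1) <| ()')
    print(score)  # 0.05 per operator present, so 0.1 here
    assert score > FSharpLexer.analyse_text('let x = 1')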
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dsls.py b/contrib/python/Pygments/py3/pygments/lexers/dsls.py
index b6847d0447..907e05bde1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dsls.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dsls.py
@@ -4,7 +4,7 @@
Lexers for various domain-specific languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,9 +13,9 @@ import re
from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
include, default, this, using, combined
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
+ Number, Punctuation, Whitespace
-__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
+__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
@@ -39,9 +39,9 @@ class ProtoBufLexer(RegexLexer):
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
(words((
- 'import', 'option', 'optional', 'required', 'repeated',
- 'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
- 'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'),
+ 'import', 'option', 'optional', 'required', 'repeated',
+ 'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
+ 'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'),
Keyword),
(words((
'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
@@ -187,167 +187,167 @@ class ThriftLexer(RegexLexer):
}
-class ZeekLexer(RegexLexer):
+class ZeekLexer(RegexLexer):
"""
- For `Zeek <https://www.zeek.org/>`_ scripts.
+ For `Zeek <https://www.zeek.org/>`_ scripts.
- .. versionadded:: 2.5
+ .. versionadded:: 2.5
"""
- name = 'Zeek'
- aliases = ['zeek', 'bro']
- filenames = ['*.zeek', '*.bro']
+ name = 'Zeek'
+ aliases = ['zeek', 'bro']
+ filenames = ['*.zeek', '*.bro']
- _hex = r'[0-9a-fA-F]'
+ _hex = r'[0-9a-fA-F]'
_float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
_h = r'[A-Za-z0-9][-A-Za-z0-9]*'
tokens = {
'root': [
- include('whitespace'),
- include('comments'),
- include('directives'),
- include('attributes'),
- include('types'),
- include('keywords'),
- include('literals'),
- include('operators'),
- include('punctuation'),
- (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
- Name.Function),
- include('identifiers'),
- ],
-
- 'whitespace': [
+ include('whitespace'),
+ include('comments'),
+ include('directives'),
+ include('attributes'),
+ include('types'),
+ include('keywords'),
+ include('literals'),
+ include('operators'),
+ include('punctuation'),
+ (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
+ Name.Function),
+ include('identifiers'),
+ ],
+
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)),
- ],
-
- 'comments': [
- (r'#.*$', Comment),
- ],
-
- 'directives': [
- (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
- (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
+ ],
+
+ 'comments': [
+ (r'#.*$', Comment),
+ ],
+
+ 'directives': [
+ (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
+ (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
(r'(@prefixes)(\s*)((\+?=).*)$', bygroups(Comment.Preproc,
Whitespace, Comment.Preproc)),
- ],
-
- 'attributes': [
- (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
- 'delete_func', 'expire_func', 'read_expire', 'write_expire',
- 'create_expire', 'synchronized', 'persistent', 'rotate_interval',
- 'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
- 'type_column', 'deprecated'),
- prefix=r'&', suffix=r'\b'),
- Keyword.Pseudo),
- ],
-
- 'types': [
- (words(('any',
- 'enum', 'record', 'set', 'table', 'vector',
- 'function', 'hook', 'event',
- 'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
- 'pattern', 'port', 'string', 'subnet', 'time'),
- suffix=r'\b'),
- Keyword.Type),
-
- (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
+ ],
+
+ 'attributes': [
+ (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
+ 'delete_func', 'expire_func', 'read_expire', 'write_expire',
+ 'create_expire', 'synchronized', 'persistent', 'rotate_interval',
+ 'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
+ 'type_column', 'deprecated'),
+ prefix=r'&', suffix=r'\b'),
+ Keyword.Pseudo),
+ ],
+
+ 'types': [
+ (words(('any',
+ 'enum', 'record', 'set', 'table', 'vector',
+ 'function', 'hook', 'event',
+ 'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
+ 'pattern', 'port', 'string', 'subnet', 'time'),
+ suffix=r'\b'),
+ Keyword.Type),
+
+ (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
bygroups(Keyword.Type, Whitespace, Operator.Word, Whitespace, Keyword.Type)),
-
- (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
+
+ (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Operator, Whitespace, Keyword.Type)),
-
- (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
+
+ (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
bygroups(Keyword, Whitespace, Name, Whitespace, Operator)),
-
- (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
+
+ (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
bygroups(Keyword, Whitespace, Keyword.Type, Whitespace, Name.Class)),
- ],
-
- 'keywords': [
- (words(('redef', 'export', 'if', 'else', 'for', 'while',
- 'return', 'break', 'next', 'continue', 'fallthrough',
- 'switch', 'default', 'case',
- 'add', 'delete',
- 'when', 'timeout', 'schedule'),
- suffix=r'\b'),
- Keyword),
- (r'(print)\b', Keyword),
- (r'(global|local|const|option)\b', Keyword.Declaration),
- (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
+ ],
+
+ 'keywords': [
+ (words(('redef', 'export', 'if', 'else', 'for', 'while',
+ 'return', 'break', 'next', 'continue', 'fallthrough',
+ 'switch', 'default', 'case',
+ 'add', 'delete',
+ 'when', 'timeout', 'schedule'),
+ suffix=r'\b'),
+ Keyword),
+ (r'(print)\b', Keyword),
+ (r'(global|local|const|option)\b', Keyword.Declaration),
+ (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- ],
-
- 'literals': [
- (r'"', String, 'string'),
-
- # Not the greatest match for patterns, but generally helps
- # disambiguate between start of a pattern and just a division
- # operator.
- (r'/(?=.*/)', String.Regex, 'regex'),
-
+ ],
+
+ 'literals': [
+ (r'"', String, 'string'),
+
+ # Not the greatest match for patterns, but generally helps
+ # disambiguate between start of a pattern and just a division
+ # operator.
+ (r'/(?=.*/)', String.Regex, 'regex'),
+
(r'(T|F)\b', Keyword.Constant),
-
- # Port
- (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
-
- # IPv4 Address
- (r'(\d{1,3}.){3}(\d{1,3})\b', Number),
-
- # IPv6 Address
- (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number),
-
- # Numeric
- (r'0[xX]' + _hex + r'+\b', Number.Hex),
- (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
- (_float + r'\b', Number.Float),
- (r'(\d+)\b', Number.Integer),
-
+
+ # Port
+ (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
+
+ # IPv4 Address
+ (r'(\d{1,3}.){3}(\d{1,3})\b', Number),
+
+ # IPv6 Address
+ (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number),
+
+ # Numeric
+ (r'0[xX]' + _hex + r'+\b', Number.Hex),
+ (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
+ (_float + r'\b', Number.Float),
+ (r'(\d+)\b', Number.Integer),
+
# Hostnames
(_h + r'(\.' + _h + r')+', String),
- ],
-
- 'operators': [
- (r'[!%*/+<=>~|&^-]', Operator),
+ ],
+
+ 'operators': [
+ (r'[!%*/+<=>~|&^-]', Operator),
(r'([-+=&|]{2}|[+=!><-]=)', Operator),
- (r'(in|as|is|of)\b', Operator.Word),
- (r'\??\$', Operator),
- ],
-
- 'punctuation': [
- (r'[{}()\[\],;.]', Punctuation),
- # The "ternary if", which uses '?' and ':', could instead be
- # treated as an Operator, but colons are more frequently used to
- # separate field/identifier names from their types, so the (often)
- # less-prominent Punctuation is used even with '?' for consistency.
- (r'[?:]', Punctuation),
- ],
-
- 'identifiers': [
- (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
+ (r'(in|as|is|of)\b', Operator.Word),
+ (r'\??\$', Operator),
+ ],
+
+ 'punctuation': [
+ (r'[{}()\[\],;.]', Punctuation),
+ # The "ternary if", which uses '?' and ':', could instead be
+ # treated as an Operator, but colons are more frequently used to
+ # separate field/identifier names from their types, so the (often)
+ # less-prominent Punctuation is used even with '?' for consistency.
+ (r'[?:]', Punctuation),
+ ],
+
+ 'identifiers': [
+ (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
(r'[a-zA-Z_]\w*', Name)
],
-
+
'string': [
- (r'\\.', String.Escape),
- (r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape),
+ (r'\\.', String.Escape),
+ (r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape),
(r'"', String, '#pop'),
- (r'.', String),
+ (r'.', String),
],
-
+
'regex': [
- (r'\\.', String.Escape),
+ (r'\\.', String.Escape),
(r'/', String.Regex, '#pop'),
- (r'.', String.Regex),
- ],
+ (r'.', String.Regex),
+ ],
}
-BroLexer = ZeekLexer
-
-
+BroLexer = ZeekLexer
+
+
class PuppetLexer(RegexLexer):
"""
For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
@@ -635,7 +635,7 @@ class AlloyLexer(RegexLexer):
(iden_rex, Name),
(r'[:,]', Punctuation),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r'\n', Whitespace),
]
}
@@ -643,7 +643,7 @@ class AlloyLexer(RegexLexer):
class PanLexer(RegexLexer):
"""
- Lexer for `pan <https://github.com/quattor/pan/>`_ source files.
+ Lexer for `pan <https://github.com/quattor/pan/>`_ source files.
Based on tcsh lexer.
@@ -831,7 +831,7 @@ class FlatlineLexer(RegexLexer):
(r'0x-?[a-f\d]+', Number.Hex),
# strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"\\(.|[a-z]+)", String.Char),
# expression template placeholder
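Editorial note, not part of the patch: 'bro' is kept as a legacy alias of the
Zeek lexer, and the module-level BroLexer name restored above is simply the
same class. A minimal sketch:

    from pygments.lexers import get_lexer_by_name
    from pygments.lexers.dsls import BroLexer, ZeekLexer

    assert BroLexer is ZeekLexer
    # Both aliases resolve to the same lexer class.
    assert type(get_lexer_by_name('bro')) is type(get_lexer_by_name('zeek'))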
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dylan.py b/contrib/python/Pygments/py3/pygments/lexers/dylan.py
index 74f81191dc..de2bc02457 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dylan.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dylan.py
@@ -4,7 +4,7 @@
Lexers for the Dylan language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,27 +31,27 @@ class DylanLexer(RegexLexer):
flags = re.IGNORECASE
- builtins = {
+ builtins = {
'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
'each-subclass', 'exception', 'exclude', 'function', 'generic',
'handler', 'inherited', 'inline', 'inline-only', 'instance',
'interface', 'import', 'keyword', 'library', 'macro', 'method',
'module', 'open', 'primary', 'required', 'sealed', 'sideways',
- 'singleton', 'slot', 'thread', 'variable', 'virtual'}
+ 'singleton', 'slot', 'thread', 'variable', 'virtual'}
- keywords = {
+ keywords = {
'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
- 'while'}
+ 'while'}
- operators = {
+ operators = {
'~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
- '>', '>=', '&', '|'}
+ '>', '>=', '&', '|'}
- functions = {
+ functions = {
'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
@@ -85,7 +85,7 @@ class DylanLexer(RegexLexer):
'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
- 'vector', 'zero?'}
+ 'vector', 'zero?'}
valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
@@ -276,11 +276,11 @@ class DylanConsoleLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- yield from do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
- yield from do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode))
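Editorial note, not part of the patch: the repeated 'yield from do_insertions(...)'
calls above (and in the Erlang and Elixir shell lexers further down) all follow
the same console-lexer pattern: prompt and output tokens are collected as
insertions and merged back into the sub-lexer's token stream. A minimal sketch
using a made-up '? ' prompt:

    from pygments.lexer import do_insertions
    from pygments.lexers import DylanLexer
    from pygments.token import Generic

    code = 'define constant $pi = 3.14159;\n'
    # One insertion at offset 0 of `code`: emit a Generic.Prompt token before
    # the real Dylan tokens, the way DylanConsoleLexer threads prompts in.
    insertions = [(0, [(0, Generic.Prompt, '? ')])]
    merged = list(do_insertions(insertions,
                                DylanLexer().get_tokens_unprocessed(code)))
    assert merged[0][1] is Generic.Prompt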
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ecl.py b/contrib/python/Pygments/py3/pygments/lexers/ecl.py
index 47cad2220c..4a500e1e51 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ecl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ecl.py
@@ -4,7 +4,7 @@
Lexers for the ECL language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ __all__ = ['ECLLexer']
class ECLLexer(RegexLexer):
"""
Lexer for the declarative big-data `ECL
- <https://hpccsystems.com/training/documentation/ecl-language-reference/html>`_
+ <https://hpccsystems.com/training/documentation/ecl-language-reference/html>`_
language.
.. versionadded:: 1.5
@@ -121,17 +121,17 @@ class ECLLexer(RegexLexer):
(r'[^"\']+', String),
],
}
-
- def analyse_text(text):
- """This is very difficult to guess relative to other business languages.
-        -> in conjunction with BEGIN/END seems relatively rare though."""
- result = 0
-
- if '->' in text:
- result += 0.01
- if 'BEGIN' in text:
- result += 0.01
- if 'END' in text:
- result += 0.01
-
- return result
+
+ def analyse_text(text):
+ """This is very difficult to guess relative to other business languages.
+        -> in conjunction with BEGIN/END seems relatively rare though."""
+ result = 0
+
+ if '->' in text:
+ result += 0.01
+ if 'BEGIN' in text:
+ result += 0.01
+ if 'END' in text:
+ result += 0.01
+
+ return result
diff --git a/contrib/python/Pygments/py3/pygments/lexers/eiffel.py b/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
index 599b446306..628b767afc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
@@ -4,7 +4,7 @@
Lexer for the Eiffel language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,7 +46,7 @@ class EiffelLexer(RegexLexer):
'require', 'rescue', 'retry', 'select', 'separate', 'then',
'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
Keyword.Reserved),
- (r'"\[([^\]%]|%(.|\n)|\][^"])*?\]"', String),
+ (r'"\[([^\]%]|%(.|\n)|\][^"])*?\]"', String),
(r'"([^"%\n]|%.)*?"', String),
include('numbers'),
(r"'([^'%]|%'|%%)'", String.Char),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/elm.py b/contrib/python/Pygments/py3/pygments/lexers/elm.py
index 298dbf5986..632bdae258 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/elm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/elm.py
@@ -4,7 +4,7 @@
Lexer for the Elm programming language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -40,7 +40,7 @@ class ElmLexer(RegexLexer):
reservedWords = words((
'alias', 'as', 'case', 'else', 'if', 'import', 'in',
'let', 'module', 'of', 'port', 'then', 'type', 'where',
- ), suffix=r'\b')
+ ), suffix=r'\b')
tokens = {
'root': [
@@ -70,7 +70,7 @@ class ElmLexer(RegexLexer):
(reservedWords, Keyword.Reserved),
# Types
- (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
+ (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
# Main
(specialName, Keyword.Reserved),
@@ -79,7 +79,7 @@ class ElmLexer(RegexLexer):
(words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
# Infix Operators
- (words(builtinOps), Name.Function),
+ (words(builtinOps), Name.Function),
# Numbers
include('numbers'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/email.py b/contrib/python/Pygments/py3/pygments/lexers/email.py
index 0c3fe5febe..5f12ac1044 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/email.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/email.py
@@ -1,150 +1,150 @@
-"""
- pygments.lexers.email
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the raw E-mail.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
-from pygments.lexers.mime import MIMELexer
-from pygments.token import Text, Keyword, Name, String, Number, Comment
-from pygments.util import get_bool_opt
-
-__all__ = ["EmailLexer"]
-
-
-class EmailHeaderLexer(RegexLexer):
- """
-    Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
-
- .. versionadded:: 2.5
- """
-
- def __init__(self, **options):
- super().__init__(**options)
- self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
-
- def get_x_header_tokens(self, match):
- if self.highlight_x:
- # field
- yield match.start(1), Name.Tag, match.group(1)
-
- # content
- default_actions = self.get_tokens_unprocessed(
- match.group(2), stack=("root", "header"))
- yield from default_actions
- else:
- # lowlight
- yield match.start(1), Comment.Special, match.group(1)
- yield match.start(2), Comment.Multiline, match.group(2)
-
- tokens = {
- "root": [
- (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
- (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
- ],
- "header": [
- # folding
- (r"\n[ \t]", Text.Whitespace),
- (r"\n(?![ \t])", Text.Whitespace, "#pop"),
-
- # keywords
- (r"\bE?SMTPS?\b", Keyword),
- (r"\b(?:HE|EH)LO\b", Keyword),
-
- # mailbox
- (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
- (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
-
- # domain
- (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
-
- # IPv4
- (
- r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
- r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
- Number.Integer,
- ),
-
- # IPv6
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
- (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
- (r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
- (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
- Number.Hex),
- (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
- Number.Hex),
- (
- r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
- r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
- r"[0-9])(?=\b)",
- Number.Hex,
- ),
- (
- r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-"
- r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-"
- r"9])(?=\b)",
- Number.Hex,
- ),
-
- # Date time
- (
- r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
- r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
- r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
- r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
- r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
- Name.Decorator,
- ),
-
- # RFC-2047 encoded string
- (
- r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
- r"\]^_`{|}~]+)(\?=)",
- bygroups(
- String.Affix,
- Name.Constant,
- String.Affix,
- Keyword.Constant,
- String.Affix,
- Number.Hex,
- String.Affix
- )
- ),
-
- # others
- (r'[\s]+', Text.Whitespace),
- (r'[\S]', Text),
- ],
- }
-
-
-class EmailLexer(DelegatingLexer):
- """
- Lexer for raw E-mail.
-
- Additional options accepted:
-
- `highlight-X-header`
- Highlight the fields of ``X-`` user-defined email header. (default:
- ``False``).
-
- .. versionadded:: 2.5
- """
-
- name = "E-mail"
- aliases = ["email", "eml"]
- filenames = ["*.eml"]
- mimetypes = ["message/rfc822"]
-
- def __init__(self, **options):
- super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
+"""
+ pygments.lexers.email
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the raw E-mail.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
+from pygments.lexers.mime import MIMELexer
+from pygments.token import Text, Keyword, Name, String, Number, Comment
+from pygments.util import get_bool_opt
+
+__all__ = ["EmailLexer"]
+
+
+class EmailHeaderLexer(RegexLexer):
+ """
+    Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
+
+ .. versionadded:: 2.5
+ """
+
+ def __init__(self, **options):
+ super().__init__(**options)
+ self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
+
+ def get_x_header_tokens(self, match):
+ if self.highlight_x:
+ # field
+ yield match.start(1), Name.Tag, match.group(1)
+
+ # content
+ default_actions = self.get_tokens_unprocessed(
+ match.group(2), stack=("root", "header"))
+ yield from default_actions
+ else:
+ # lowlight
+ yield match.start(1), Comment.Special, match.group(1)
+ yield match.start(2), Comment.Multiline, match.group(2)
+
+ tokens = {
+ "root": [
+ (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
+ (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
+ ],
+ "header": [
+ # folding
+ (r"\n[ \t]", Text.Whitespace),
+ (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+
+ # keywords
+ (r"\bE?SMTPS?\b", Keyword),
+ (r"\b(?:HE|EH)LO\b", Keyword),
+
+ # mailbox
+ (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
+ (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
+
+ # domain
+ (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
+
+ # IPv4
+ (
+ r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
+ r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
+ Number.Integer,
+ ),
+
+ # IPv6
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
+ (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
+ (r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
+ (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
+ Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
+ Number.Hex),
+ (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
+ Number.Hex),
+ (
+ r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
+ r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
+ r"[0-9])(?=\b)",
+ Number.Hex,
+ ),
+ (
+ r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-"
+ r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-"
+ r"9])(?=\b)",
+ Number.Hex,
+ ),
+
+ # Date time
+ (
+ r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
+ r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
+ r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
+ r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
+ r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
+ Name.Decorator,
+ ),
+
+ # RFC-2047 encoded string
+ (
+ r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
+ r"\]^_`{|}~]+)(\?=)",
+ bygroups(
+ String.Affix,
+ Name.Constant,
+ String.Affix,
+ Keyword.Constant,
+ String.Affix,
+ Number.Hex,
+ String.Affix
+ )
+ ),
+
+ # others
+ (r'[\s]+', Text.Whitespace),
+ (r'[\S]', Text),
+ ],
+ }
+
+
+class EmailLexer(DelegatingLexer):
+ """
+ Lexer for raw E-mail.
+
+ Additional options accepted:
+
+ `highlight-X-header`
+ Highlight the fields of ``X-`` user-defined email header. (default:
+ ``False``).
+
+ .. versionadded:: 2.5
+ """
+
+ name = "E-mail"
+ aliases = ["email", "eml"]
+ filenames = ["*.eml"]
+ mimetypes = ["message/rfc822"]
+
+ def __init__(self, **options):
+ super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
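Editorial note, not part of the patch: EmailHeaderLexer's X- header handling is
driven by the 'highlight-X-header' option read in __init__ above; with the
option off, X- fields are lowlighted as comments, and with it on they get the
full header treatment. A small sketch against the header sub-lexer alone:

    from pygments.lexers.email import EmailHeaderLexer
    from pygments.token import Comment, Name

    hdrs = "From: alice@example.com\nX-Mailer: demo\n"
    plain = EmailHeaderLexer()
    loud = EmailHeaderLexer(**{"highlight-X-header": True})

    assert (Name.Tag, "From:") in list(plain.get_tokens(hdrs))
    # X- fields are lowlighted by default ...
    assert any(token is Comment.Special for token, _ in plain.get_tokens(hdrs))
    # ... and tagged like ordinary header fields once the option is enabled.
    assert (Name.Tag, "X-Mailer:") in list(loud.get_tokens(hdrs))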
diff --git a/contrib/python/Pygments/py3/pygments/lexers/erlang.py b/contrib/python/Pygments/py3/pygments/lexers/erlang.py
index 2563ffc263..1e496729e1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/erlang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/erlang.py
@@ -4,7 +4,7 @@
Lexers for Erlang.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -162,7 +162,7 @@ class ErlangShellLexer(Lexer):
filenames = ['*.erl-sh']
mimetypes = ['text/x-erl-shellsession']
- _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
+ _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
def get_tokens_unprocessed(self, text):
erlexer = ErlangLexer(**self.options)
@@ -179,8 +179,8 @@ class ErlangShellLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- yield from do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('*'):
@@ -188,8 +188,8 @@ class ErlangShellLexer(Lexer):
else:
yield match.start(), Generic.Output, line
if curcode:
- yield from do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode))
def gen_elixir_string_rules(name, symbol, token):
@@ -204,10 +204,10 @@ def gen_elixir_string_rules(name, symbol, token):
return states
-def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
+def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
if interpol:
return [
- (r'[^#%s\\]+' % (term_class,), token),
+ (r'[^#%s\\]+' % (term_class,), token),
include('escapes'),
(r'\\.', token),
(r'%s[a-zA-Z]*' % (term,), token, '#pop'),
@@ -215,7 +215,7 @@ def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
]
else:
return [
- (r'[^%s\\]+' % (term_class,), token),
+ (r'[^%s\\]+' % (term_class,), token),
(r'\\.', token),
(r'%s[a-zA-Z]*' % (term,), token, '#pop'),
]
@@ -230,7 +230,7 @@ class ElixirLexer(RegexLexer):
name = 'Elixir'
aliases = ['elixir', 'ex', 'exs']
- filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
+ filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
mimetypes = ['text/x-elixir']
KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
@@ -288,14 +288,14 @@ class ElixirLexer(RegexLexer):
def gen_elixir_sigil_rules():
# all valid sigil terminators (excluding heredocs)
terminators = [
- (r'\{', r'\}', '}', 'cb'),
- (r'\[', r'\]', r'\]', 'sb'),
- (r'\(', r'\)', ')', 'pa'),
- ('<', '>', '>', 'ab'),
- ('/', '/', '/', 'slas'),
- (r'\|', r'\|', '|', 'pipe'),
- ('"', '"', '"', 'quot'),
- ("'", "'", "'", 'apos'),
+ (r'\{', r'\}', '}', 'cb'),
+ (r'\[', r'\]', r'\]', 'sb'),
+ (r'\(', r'\)', ')', 'pa'),
+ ('<', '>', '>', 'ab'),
+ ('/', '/', '/', 'slas'),
+ (r'\|', r'\|', '|', 'pipe'),
+ ('"', '"', '"', 'quot'),
+ ("'", "'", "'", 'apos'),
]
# heredocs have slightly different rules
@@ -325,15 +325,15 @@ class ElixirLexer(RegexLexer):
include('heredoc_no_interpol'),
]
- for lterm, rterm, rterm_class, name in terminators:
+ for lterm, rterm, rterm_class, name in terminators:
states['sigils'] += [
(r'~[a-z]' + lterm, token, name + '-intp'),
(r'~[A-Z]' + lterm, token, name + '-no-intp'),
]
- states[name + '-intp'] = \
- gen_elixir_sigstr_rules(rterm, rterm_class, token)
+ states[name + '-intp'] = \
+ gen_elixir_sigstr_rules(rterm, rterm_class, token)
states[name + '-no-intp'] = \
- gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False)
+ gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False)
return states
@@ -495,7 +495,7 @@ class ElixirConsoleLexer(Lexer):
aliases = ['iex']
mimetypes = ['text/x-elixir-shellsession']
- _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
+ _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
def get_tokens_unprocessed(self, text):
exlexer = ElixirLexer(**self.options)
@@ -505,7 +505,7 @@ class ElixirConsoleLexer(Lexer):
insertions = []
for match in line_re.finditer(text):
line = match.group()
- if line.startswith('** '):
+ if line.startswith('** '):
in_error = True
insertions.append((len(curcode),
[(0, Generic.Error, line[:-1])]))
@@ -520,12 +520,12 @@ class ElixirConsoleLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- yield from do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
token = Generic.Error if in_error else Generic.Output
yield match.start(), token, line
if curcode:
- yield from do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode))
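Editorial note, not part of the patch: the iex prompt regex restored above
accepts plain counters, node-qualified counters, and '...' continuation
prompts. A quick check of what it matches:

    import re

    _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')

    for line in ('iex(1)> ', 'iex(node@host)1> ', '...(1)> '):
        assert _prompt_re.match(line)
    assert not _prompt_re.match('erl> ')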
diff --git a/contrib/python/Pygments/py3/pygments/lexers/esoteric.py b/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
index a884d4687b..4af1a5797c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
@@ -4,7 +4,7 @@
Lexers for esoteric languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -47,32 +47,32 @@ class BrainfuckLexer(RegexLexer):
]
}
- def analyse_text(text):
- """It's safe to assume that a program which mostly consists of + -
- and < > is brainfuck."""
- plus_minus_count = 0
- greater_less_count = 0
-
- range_to_check = max(256, len(text))
-
- for c in text[:range_to_check]:
- if c == '+' or c == '-':
- plus_minus_count += 1
- if c == '<' or c == '>':
- greater_less_count += 1
-
- if plus_minus_count > (0.25 * range_to_check):
- return 1.0
- if greater_less_count > (0.25 * range_to_check):
- return 1.0
-
- result = 0
- if '[-]' in text:
- result += 0.5
-
- return result
-
-
+ def analyse_text(text):
+ """It's safe to assume that a program which mostly consists of + -
+ and < > is brainfuck."""
+ plus_minus_count = 0
+ greater_less_count = 0
+
+ range_to_check = max(256, len(text))
+
+ for c in text[:range_to_check]:
+ if c == '+' or c == '-':
+ plus_minus_count += 1
+ if c == '<' or c == '>':
+ greater_less_count += 1
+
+ if plus_minus_count > (0.25 * range_to_check):
+ return 1.0
+ if greater_less_count > (0.25 * range_to_check):
+ return 1.0
+
+ result = 0
+ if '[-]' in text:
+ result += 0.5
+
+ return result
+
+
class BefungeLexer(RegexLexer):
"""
Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
@@ -283,23 +283,23 @@ class AheuiLexer(RegexLexer):
tokens = {
'root': [
- ('['
- '나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
- '다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
- '따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟'
- '라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫'
- '마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷'
- '바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃'
- '빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏'
- '사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛'
- '싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧'
- '자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿'
- '차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗'
- '카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣'
- '타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯'
- '파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻'
- '하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
- ']', Operator),
+ ('['
+ '나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
+ '다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
+ '따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟'
+ '라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫'
+ '마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷'
+ '바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃'
+ '빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏'
+ '사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛'
+ '싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧'
+ '자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿'
+ '차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗'
+ '카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣'
+ '타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯'
+ '파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻'
+ '하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
+ ']', Operator),
('.', Comment),
],
}
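Editorial note, not part of the patch: the Brainfuck heuristic restored above
scores the cell-clearing idiom '[-]' on its own, long before the +-<> density
check can trigger on short inputs. A quick sketch:

    from pygments.lexers.esoteric import BrainfuckLexer

    assert BrainfuckLexer.analyse_text('[-]') == 0.5
    assert BrainfuckLexer.analyse_text('print("hi")') == 0.0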
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ezhil.py b/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
index 6d282c96bb..567f063eb1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
@@ -4,7 +4,7 @@
Pygments lexers for Ezhil language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,20 +29,20 @@ class EzhilLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
# Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
# This much simpler version is close enough, and includes combining marks.
- _TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]'
+ _TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]'
tokens = {
'root': [
include('keywords'),
(r'#.*$', Comment.Single),
(r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
- ('இல்', Operator.Word),
- (words(('assert', 'max', 'min',
- 'நீளம்', 'சரம்_இடமாற்று', 'சரம்_கண்டுபிடி',
- 'பட்டியல்', 'பின்இணை', 'வரிசைப்படுத்து',
- 'எடு', 'தலைகீழ்', 'நீட்டிக்க', 'நுழைக்க', 'வை',
- 'கோப்பை_திற', 'கோப்பை_எழுது', 'கோப்பை_மூடு',
- 'pi', 'sin', 'cos', 'tan', 'sqrt', 'hypot', 'pow',
- 'exp', 'log', 'log10', 'exit',
+ ('இல்', Operator.Word),
+ (words(('assert', 'max', 'min',
+ 'நீளம்', 'சரம்_இடமாற்று', 'சரம்_கண்டுபிடி',
+ 'பட்டியல்', 'பின்இணை', 'வரிசைப்படுத்து',
+ 'எடு', 'தலைகீழ்', 'நீட்டிக்க', 'நுழைக்க', 'வை',
+ 'கோப்பை_திற', 'கோப்பை_எழுது', 'கோப்பை_மூடு',
+ 'pi', 'sin', 'cos', 'tan', 'sqrt', 'hypot', 'pow',
+ 'exp', 'log', 'log10', 'exit',
), suffix=r'\b'), Name.Builtin),
(r'(True|False)\b', Keyword.Constant),
(r'[^\S\n]+', Whitespace),
@@ -51,10 +51,10 @@ class EzhilLexer(RegexLexer):
(r'[(){}\[\]:;.]', Punctuation),
],
'keywords': [
- ('பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
+ ('பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
],
'identifier': [
- ('(?:'+_TALETTERS+')(?:[0-9]|'+_TALETTERS+')*', Name),
+ ('(?:'+_TALETTERS+')(?:[0-9]|'+_TALETTERS+')*', Name),
],
'literal': [
(r'".*?"', String),
@@ -63,14 +63,14 @@ class EzhilLexer(RegexLexer):
]
}
- def analyse_text(text):
- """This language uses Tamil-script. We'll assume that if there's a
- decent amount of Tamil-characters, it's this language. This assumption
- is obviously horribly off if someone uses string literals in tamil
- in another language."""
- if len(re.findall(r'[\u0b80-\u0bff]', text)) > 10:
- return 0.25
-
+ def analyse_text(text):
+ """This language uses Tamil-script. We'll assume that if there's a
+ decent amount of Tamil-characters, it's this language. This assumption
+ is obviously horribly off if someone uses string literals in tamil
+ in another language."""
+ if len(re.findall(r'[\u0b80-\u0bff]', text)) > 10:
+ return 0.25
+
def __init__(self, **options):
- super().__init__(**options)
+ super().__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
diff --git a/contrib/python/Pygments/py3/pygments/lexers/factor.py b/contrib/python/Pygments/py3/pygments/lexers/factor.py
index 05159835d4..0c1772e3a9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/factor.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/factor.py
@@ -4,7 +4,7 @@
Lexers for the Factor language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -285,7 +285,7 @@ class FactorLexer(RegexLexer):
(r'(<PRIVATE|PRIVATE>)(\s)', bygroups(Keyword.Namespace, Whitespace)),
# strings
- (r'"""\s(?:.|\n)*?\s"""', String),
+ (r'"""\s(?:.|\n)*?\s"""', String),
(r'"(?:\\\\|\\"|[^"])*"', String),
(r'(\S+")(\s+)((?:\\\\|\\"|[^"])*")',
bygroups(String, Whitespace, String)),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fantom.py b/contrib/python/Pygments/py3/pygments/lexers/fantom.py
index 7272e006bd..de6c041aa6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fantom.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fantom.py
@@ -4,7 +4,7 @@
Lexer for the Fantom language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/felix.py b/contrib/python/Pygments/py3/pygments/lexers/felix.py
index 55dee25e6d..7bc01eccc9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/felix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/felix.py
@@ -4,7 +4,7 @@
Lexer for the Felix language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/floscript.py b/contrib/python/Pygments/py3/pygments/lexers/floscript.py
index e9b7da2bfc..ed8452a05c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/floscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/floscript.py
@@ -4,7 +4,7 @@
Lexer for FloScript
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -58,7 +58,7 @@ class FloScriptLexer(RegexLexer):
include('name'),
include('numbers'),
- (r'#.+$', Comment.Single),
+ (r'#.+$', Comment.Single),
],
'string': [
('[^"]+', String),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/forth.py b/contrib/python/Pygments/py3/pygments/lexers/forth.py
index 1f67aa4ed5..3eed947f8a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/forth.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/forth.py
@@ -4,13 +4,13 @@
Lexer for the Forth language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, bygroups
+from pygments.lexer import RegexLexer, bygroups
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Whitespace
@@ -64,7 +64,7 @@ class ForthLexer(RegexLexer):
r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|'
r'variable|while|word|xor|\[char\]|\[\'\]|'
r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|'
- # *** Wordset CORE-EXT
+ # *** Wordset CORE-EXT
r'\.r|0<>|'
r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|'
r'case|compile,|endcase|endof|erase|false|'
@@ -72,38 +72,38 @@ class ForthLexer(RegexLexer):
r'restore-input|roll|save-input|source-id|to|'
r'true|tuck|u\.r|u>|unused|value|within|'
r'\[compile\]|'
- # *** Wordset CORE-EXT-obsolescent
+ # *** Wordset CORE-EXT-obsolescent
r'\#tib|convert|expect|query|span|'
r'tib|'
- # *** Wordset DOUBLE
+ # *** Wordset DOUBLE
r'2constant|2literal|2variable|d\+|d-|'
r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|'
r'dabs|dmax|dmin|dnegate|m\*\/|m\+|'
- # *** Wordset DOUBLE-EXT
+ # *** Wordset DOUBLE-EXT
r'2rot|du<|'
- # *** Wordset EXCEPTION
+ # *** Wordset EXCEPTION
r'catch|throw|'
- # *** Wordset EXCEPTION-EXT
+ # *** Wordset EXCEPTION-EXT
r'abort|abort\"|'
- # *** Wordset FACILITY
+ # *** Wordset FACILITY
r'at-xy|key\?|page|'
- # *** Wordset FACILITY-EXT
+ # *** Wordset FACILITY-EXT
r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|'
- # *** Wordset FILE
+ # *** Wordset FILE
r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|'
r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|'
r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|'
r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|'
- # *** Wordset FILE-EXT
+ # *** Wordset FILE-EXT
r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|'
- # *** Wordset FLOAT
+ # *** Wordset FLOAT
r'>float|d>f|'
r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|'
r'falign|faligned|fconstant|fdepth|fdrop|fdup|'
r'fliteral|float\+|floats|floor|fmax|fmin|'
r'fnegate|fover|frot|fround|fswap|fvariable|'
r'represent|'
- # *** Wordset FLOAT-EXT
+ # *** Wordset FLOAT-EXT
r'df!|df@|dfalign|dfaligned|dfloat\+|'
r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|'
r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|'
@@ -111,34 +111,34 @@ class ForthLexer(RegexLexer):
r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|'
r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|'
r'sfloats|'
- # *** Wordset LOCAL
+ # *** Wordset LOCAL
r'\(local\)|to|'
- # *** Wordset LOCAL-EXT
+ # *** Wordset LOCAL-EXT
r'locals\||'
- # *** Wordset MEMORY
+ # *** Wordset MEMORY
r'allocate|free|resize|'
- # *** Wordset SEARCH
+ # *** Wordset SEARCH
r'definitions|find|forth-wordlist|get-current|'
r'get-order|search-wordlist|set-current|set-order|'
r'wordlist|'
- # *** Wordset SEARCH-EXT
+ # *** Wordset SEARCH-EXT
r'also|forth|only|order|previous|'
- # *** Wordset STRING
+ # *** Wordset STRING
r'-trailing|\/string|blank|cmove|cmove>|compare|'
r'search|sliteral|'
- # *** Wordset TOOLS
+ # *** Wordset TOOLS
r'.s|dump|see|words|'
- # *** Wordset TOOLS-EXT
+ # *** Wordset TOOLS-EXT
r';code|'
r'ahead|assembler|bye|code|cs-pick|cs-roll|'
r'editor|state|\[else\]|\[if\]|\[then\]|'
- # *** Wordset TOOLS-EXT-obsolescent
- r'forget|'
- # Forth 2012
- r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
- r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
- r'name>interpret|name>compile|name>string|'
- r'cfield:|end-structure)(?!\S)', Keyword),
+ # *** Wordset TOOLS-EXT-obsolescent
+ r'forget|'
+ # Forth 2012
+ r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
+ r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
+ r'name>interpret|name>compile|name>string|'
+ r'cfield:|end-structure)(?!\S)', Keyword),
# Numbers
(r'(\$[0-9A-F]+)', Number.Hex),
@@ -149,18 +149,18 @@ class ForthLexer(RegexLexer):
r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|'
r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|'
r'find-name|1ms|'
- r'sp0|rp0|\(evaluate\)|int-trap|int!)(?!\S)',
+ r'sp0|rp0|\(evaluate\)|int-trap|int!)(?!\S)',
Name.Constant),
# a proposal
(r'(do-recognizer|r:fail|recognizer:|get-recognizers|'
r'set-recognizers|r:float|r>comp|r>int|r>post|'
r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|'
- r'rec:num|rec:float|rec:word)(?!\S)', Name.Decorator),
+ r'rec:num|rec:float|rec:word)(?!\S)', Name.Decorator),
# defining words. The next word is a new command name
(r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)',
bygroups(Keyword.Namespace, Text), 'worddef'),
- (r'\S+', Name.Function), # Anything else is executed
+ (r'\S+', Name.Function), # Anything else is executed
],
'worddef': [
@@ -170,9 +170,9 @@ class ForthLexer(RegexLexer):
(r'[^"]+', String, '#pop'),
],
}
-
- def analyse_text(text):
- """Forth uses : COMMAND ; quite a lot in a single line, so we're trying
- to find that."""
- if re.search('\n:[^\n]+;\n', text):
- return 0.3
+
+ def analyse_text(text):
+ """Forth uses : COMMAND ; quite a lot in a single line, so we're trying
+ to find that."""
+ if re.search('\n:[^\n]+;\n', text):
+ return 0.3
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fortran.py b/contrib/python/Pygments/py3/pygments/lexers/fortran.py
index b5d977eaf0..a0bc3eb9b3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fortran.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fortran.py
@@ -4,7 +4,7 @@
Lexers for Fortran languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -59,17 +59,17 @@ class FortranLexer(RegexLexer):
'CODIMENSION', 'COMMON', 'CONTIGUOUS', 'CONTAINS',
'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
- 'ENDASSOCIATE', 'ENDBLOCK', 'ENDDO', 'ENDENUM', 'ENDFORALL',
- 'ENDFUNCTION', 'ENDIF', 'ENDINTERFACE', 'ENDMODULE', 'ENDPROGRAM',
- 'ENDSELECT', 'ENDSUBMODULE', 'ENDSUBROUTINE', 'ENDTYPE', 'ENDWHERE',
- 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'ERROR STOP', 'EXIT',
- 'EXTENDS', 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
+ 'ENDASSOCIATE', 'ENDBLOCK', 'ENDDO', 'ENDENUM', 'ENDFORALL',
+ 'ENDFUNCTION', 'ENDIF', 'ENDINTERFACE', 'ENDMODULE', 'ENDPROGRAM',
+ 'ENDSELECT', 'ENDSUBMODULE', 'ENDSUBROUTINE', 'ENDTYPE', 'ENDWHERE',
+ 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'ERROR STOP', 'EXIT',
+ 'EXTENDS', 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
'FUNCTION', 'GENERIC', 'IF', 'IMAGES', 'IMPLICIT',
'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
- 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'ONLY', 'OPEN',
- 'OPTIONAL', 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT',
- 'PRIVATE', 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
+ 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'ONLY', 'OPEN',
+ 'OPTIONAL', 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT',
+ 'PRIVATE', 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
@@ -161,10 +161,10 @@ class FortranLexer(RegexLexer):
],
'nums': [
- (r'\d+(?![.e])(_([1-9]|[a-z]\w*))?', Number.Integer),
- (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
- (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
- (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_([1-9]|[a-z]\w*))?', Number.Float),
+ (r'\d+(?![.e])(_([1-9]|[a-z]\w*))?', Number.Integer),
+ (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
+ (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
+ (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_([1-9]|[a-z]\w*))?', Number.Float),
],
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/foxpro.py b/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
index 0b756d4da3..04d7d132ee 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
@@ -4,7 +4,7 @@
Simple lexer for Microsoft Visual FoxPro source code.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,7 +35,7 @@ class FoxProLexer(RegexLexer):
tokens = {
'root': [
- (r';\s*\n', Punctuation), # consume newline
+ (r';\s*\n', Punctuation), # consume newline
(r'(^|\n)\s*', Text, 'newline'),
# Square brackets may be used for array indices
diff --git a/contrib/python/Pygments/py3/pygments/lexers/freefem.py b/contrib/python/Pygments/py3/pygments/lexers/freefem.py
index 532f134fa8..ef0dced2b7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/freefem.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/freefem.py
@@ -4,7 +4,7 @@
Lexer for FreeFem++ language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,27 +35,27 @@ class FreeFemLexer(CppLexer):
mimetypes = ['text/x-freefem']
# Language operators
- operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
+ operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
# types
- types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
- 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
- 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
- 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'}
+ types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
+ 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
+ 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
+ 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'}
# finite element spaces
- fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
- 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
- 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
- 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
- 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
- 'RT2', 'RT2Ortho'}
+ fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
+ 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
+ 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
+ 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
+ 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
+ 'RT2', 'RT2Ortho'}
# preprocessor
- preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'}
+ preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'}
# Language keywords
- keywords = {
+ keywords = {
'adj',
'append',
'area',
@@ -168,10 +168,10 @@ class FreeFemLexer(CppLexer):
'x',
'y',
'z'
- }
+ }
# Language shipped functions and class ( )
- functions = {
+ functions = {
'abs',
'acos',
'acosh',
@@ -701,10 +701,10 @@ class FreeFemLexer(CppLexer):
'y0',
'y1',
'yn'
- }
+ }
# function parameters
- parameters = {
+ parameters = {
'A',
'A1',
'abserror',
@@ -848,13 +848,13 @@ class FreeFemLexer(CppLexer):
'WindowIndex',
'which',
'zbound'
- }
+ }
# deprecated
- deprecated = {'fixeborder'}
+ deprecated = {'fixeborder'}
# do not highlight
- suppress_highlight = {
+ suppress_highlight = {
'alignof',
'asm',
'constexpr',
@@ -873,7 +873,7 @@ class FreeFemLexer(CppLexer):
'typeid',
'typename',
'using'
- }
+ }
def get_tokens_unprocessed(self, text):
for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/functional.py b/contrib/python/Pygments/py3/pygments/lexers/functional.py
index e33a72e21e..afbd23f47d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/functional.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/functional.py
@@ -4,7 +4,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/futhark.py b/contrib/python/Pygments/py3/pygments/lexers/futhark.py
index 57bfb94314..5ca2fc238d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/futhark.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/futhark.py
@@ -1,111 +1,111 @@
-"""
- pygments.lexers.futhark
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Futhark language
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+"""
+ pygments.lexers.futhark
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Futhark language
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-from pygments import unistring as uni
-
-__all__ = ['FutharkLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class FutharkLexer(RegexLexer):
- """
- A Futhark lexer
-
- .. versionadded:: 2.8
- """
- name = 'Futhark'
- aliases = ['futhark']
- filenames = ['*.fut']
- mimetypes = ['text/x-futhark']
-
- flags = re.MULTILINE | re.UNICODE
-
- num_types = ('i8', 'i16', 'i32', 'i64', 'u8', 'u16', 'u32', 'u64', 'f32', 'f64')
-
- other_types = ('bool', )
-
- reserved = ('if', 'then', 'else', 'let', 'loop', 'in', 'with', 'type',
- 'val', 'entry', 'for', 'while', 'do', 'case', 'match',
- 'include', 'import', 'module', 'open', 'local', 'assert', '_')
-
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- num_postfix = r'(%s)?' % '|'.join(num_types)
-
- identifier_re = '[a-zA-Z_][a-zA-Z_0-9\']*'
-
- # opstart_re = '+\-\*/%=\!><\|&\^'
-
- tokens = {
- 'root': [
+from pygments import unistring as uni
+
+__all__ = ['FutharkLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class FutharkLexer(RegexLexer):
+ """
+ A Futhark lexer
+
+ .. versionadded:: 2.8
+ """
+ name = 'Futhark'
+ aliases = ['futhark']
+ filenames = ['*.fut']
+ mimetypes = ['text/x-futhark']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ num_types = ('i8', 'i16', 'i32', 'i64', 'u8', 'u16', 'u32', 'u64', 'f32', 'f64')
+
+ other_types = ('bool', )
+
+ reserved = ('if', 'then', 'else', 'let', 'loop', 'in', 'with', 'type',
+ 'val', 'entry', 'for', 'while', 'do', 'case', 'match',
+ 'include', 'import', 'module', 'open', 'local', 'assert', '_')
+
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ num_postfix = r'(%s)?' % '|'.join(num_types)
+
+ identifier_re = '[a-zA-Z_][a-zA-Z_0-9\']*'
+
+ # opstart_re = '+\-\*/%=\!><\|&\^'
+
+ tokens = {
+ 'root': [
(r'--(.*?)$', Comment.Single),
(r'\s+', Whitespace),
- (r'\(\)', Punctuation),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'\b(%s)(?!\')\b' % '|'.join(num_types + other_types), Keyword.Type),
-
- # Identifiers
- (r'#\[([a-zA-Z_\(\) ]*)\]', Comment.Preproc),
- (r'!?(%s\.)*%s' % (identifier_re, identifier_re), Name),
-
- (r'\\', Operator),
- (r'[-+/%=!><|&*^][-+/%=!><|&*^.]*', Operator),
- (r'[][(),:;`{}]', Punctuation),
-
- # Numbers
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix,
- Number.Float),
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
- r'(_*[pP][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
- (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*' + num_postfix, Number.Float),
- (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
- (r'0[bB]_*[01](_*[01])*' + num_postfix, Number.Bin),
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*' + num_postfix, Number.Hex),
- (r'\d(_*\d)*' + num_postfix, Number.Integer),
-
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[[a-zA-Z_\d]*\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
-
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
+ (r'\(\)', Punctuation),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'\b(%s)(?!\')\b' % '|'.join(num_types + other_types), Keyword.Type),
+
+ # Identifiers
+ (r'#\[([a-zA-Z_\(\) ]*)\]', Comment.Preproc),
+ (r'!?(%s\.)*%s' % (identifier_re, identifier_re), Name),
+
+ (r'\\', Operator),
+ (r'[-+/%=!><|&*^][-+/%=!><|&*^.]*', Operator),
+ (r'[][(),:;`{}]', Punctuation),
+
+ # Numbers
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix,
+ Number.Float),
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
+ r'(_*[pP][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
+ (r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*' + num_postfix, Number.Float),
+ (r'\d(_*\d)*\.\d(_*\d)*(_*[eE][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
+ (r'0[bB]_*[01](_*[01])*' + num_postfix, Number.Bin),
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*' + num_postfix, Number.Hex),
+ (r'\d(_*\d)*' + num_postfix, Number.Integer),
+
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[[a-zA-Z_\d]*\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
(r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
- ],
- }
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py b/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py
index 03be281f5d..21949be90f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py
@@ -1,35 +1,35 @@
-"""
- pygments.lexers.gcodelexer
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the G Code Language.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Name, Text, Keyword, Number
-
-__all__ = ['GcodeLexer']
-
-
-class GcodeLexer(RegexLexer):
- """
- For gcode source code.
-
- .. versionadded:: 2.9
- """
- name = 'g-code'
- aliases = ['gcode']
- filenames = ['*.gcode']
-
- tokens = {
- 'root': [
- (r';.*\n', Comment),
- (r'^[gmGM]\d{1,4}\s', Name.Builtin), # M or G commands
- (r'([^gGmM])([+-]?\d*[.]?\d+)', bygroups(Keyword, Number)),
- (r'\s', Text.Whitespace),
- (r'.*\n', Text),
- ]
- }
+"""
+ pygments.lexers.gcodelexer
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the G Code Language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Name, Text, Keyword, Number
+
+__all__ = ['GcodeLexer']
+
+
+class GcodeLexer(RegexLexer):
+ """
+ For gcode source code.
+
+ .. versionadded:: 2.9
+ """
+ name = 'g-code'
+ aliases = ['gcode']
+ filenames = ['*.gcode']
+
+ tokens = {
+ 'root': [
+ (r';.*\n', Comment),
+ (r'^[gmGM]\d{1,4}\s', Name.Builtin), # M or G commands
+ (r'([^gGmM])([+-]?\d*[.]?\d+)', bygroups(Keyword, Number)),
+ (r'\s', Text.Whitespace),
+ (r'.*\n', Text),
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/gdscript.py b/contrib/python/Pygments/py3/pygments/lexers/gdscript.py
index 1048d1b56f..4c4d06a306 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/gdscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/gdscript.py
@@ -1,346 +1,346 @@
-"""
- pygments.lexers.gdscript
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for GDScript.
-
- Modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original
- python.py.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words, \
- combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+"""
+ pygments.lexers.gdscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for GDScript.
+
+ Modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original
+ python.py.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words, \
+ combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ["GDScriptLexer"]
-
-line_re = re.compile(".*?\n")
-
-
-class GDScriptLexer(RegexLexer):
- """
- For `GDScript source code <https://www.godotengine.org>`_.
- """
-
- name = "GDScript"
- aliases = ["gdscript", "gd"]
- filenames = ["*.gd"]
- mimetypes = ["text/x-gdscript", "application/x-gdscript"]
-
- def innerstring_rules(ttype):
- return [
- # the old style '%s' % (...) string formatting
- (
- r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?"
- "[hlL]?[E-GXc-giorsux%]",
- String.Interpol,
- ),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r"%", ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- "root": [
+
+__all__ = ["GDScriptLexer"]
+
+line_re = re.compile(".*?\n")
+
+
+class GDScriptLexer(RegexLexer):
+ """
+ For `GDScript source code <https://www.godotengine.org>`_.
+ """
+
+ name = "GDScript"
+ aliases = ["gdscript", "gd"]
+ filenames = ["*.gd"]
+ mimetypes = ["text/x-gdscript", "application/x-gdscript"]
+
+ def innerstring_rules(ttype):
+ return [
+ # the old style '%s' % (...) string formatting
+ (
+ r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?"
+ "[hlL]?[E-GXc-giorsux%]",
+ String.Interpol,
+ ),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r"%", ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ "root": [
(r"\n", Whitespace),
- (
- r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+ (
+ r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
bygroups(Whitespace, String.Affix, String.Doc),
- ),
- (
- r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+ ),
+ (
+ r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
bygroups(Whitespace, String.Affix, String.Doc),
- ),
+ ),
(r"[^\S\n]+", Whitespace),
- (r"#.*$", Comment.Single),
- (r"[]{}:(),;[]", Punctuation),
+ (r"#.*$", Comment.Single),
+ (r"[]{}:(),;[]", Punctuation),
(r"(\\)(\n)", bygroups(Text, Whitespace)),
- (r"\\", Text),
- (r"(in|and|or|not)\b", Operator.Word),
- (
- r"!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]",
- Operator,
- ),
- include("keywords"),
+ (r"\\", Text),
+ (r"(in|and|or|not)\b", Operator.Word),
+ (
+ r"!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]",
+ Operator,
+ ),
+ include("keywords"),
(r"(func)(\s+)", bygroups(Keyword, Whitespace), "funcname"),
(r"(class)(\s+)", bygroups(Keyword, Whitespace), "classname"),
- include("builtins"),
- (
- '([rR]|[uUbB][rR]|[rR][uUbB])(""")',
- bygroups(String.Affix, String.Double),
- "tdqs",
- ),
- (
- "([rR]|[uUbB][rR]|[rR][uUbB])(''')",
- bygroups(String.Affix, String.Single),
- "tsqs",
- ),
- (
- '([rR]|[uUbB][rR]|[rR][uUbB])(")',
- bygroups(String.Affix, String.Double),
- "dqs",
- ),
- (
- "([rR]|[uUbB][rR]|[rR][uUbB])(')",
- bygroups(String.Affix, String.Single),
- "sqs",
- ),
- (
- '([uUbB]?)(""")',
- bygroups(String.Affix, String.Double),
- combined("stringescape", "tdqs"),
- ),
- (
- "([uUbB]?)(''')",
- bygroups(String.Affix, String.Single),
- combined("stringescape", "tsqs"),
- ),
- (
- '([uUbB]?)(")',
- bygroups(String.Affix, String.Double),
- combined("stringescape", "dqs"),
- ),
- (
- "([uUbB]?)(')",
- bygroups(String.Affix, String.Single),
- combined("stringescape", "sqs"),
- ),
- include("name"),
- include("numbers"),
- ],
- "keywords": [
- (
- words(
- (
- "and",
- "in",
- "not",
- "or",
- "as",
- "breakpoint",
- "class",
- "class_name",
- "extends",
- "is",
- "func",
- "setget",
- "signal",
- "tool",
- "const",
- "enum",
- "export",
- "onready",
- "static",
- "var",
- "break",
- "continue",
- "if",
- "elif",
- "else",
- "for",
- "pass",
- "return",
- "match",
- "while",
- "remote",
- "master",
- "puppet",
- "remotesync",
- "mastersync",
- "puppetsync",
- ),
- suffix=r"\b",
- ),
- Keyword,
- ),
- ],
- "builtins": [
- (
- words(
- (
- "Color8",
- "ColorN",
- "abs",
- "acos",
- "asin",
- "assert",
- "atan",
- "atan2",
- "bytes2var",
- "ceil",
- "char",
- "clamp",
- "convert",
- "cos",
- "cosh",
- "db2linear",
- "decimals",
- "dectime",
- "deg2rad",
- "dict2inst",
- "ease",
- "exp",
- "floor",
- "fmod",
- "fposmod",
- "funcref",
- "hash",
- "inst2dict",
- "instance_from_id",
- "is_inf",
- "is_nan",
- "lerp",
- "linear2db",
- "load",
- "log",
- "max",
- "min",
- "nearest_po2",
- "pow",
- "preload",
- "print",
- "print_stack",
- "printerr",
- "printraw",
- "prints",
- "printt",
- "rad2deg",
- "rand_range",
- "rand_seed",
- "randf",
- "randi",
- "randomize",
- "range",
- "round",
- "seed",
- "sign",
- "sin",
- "sinh",
- "sqrt",
- "stepify",
- "str",
- "str2var",
- "tan",
- "tan",
- "tanh",
- "type_exist",
- "typeof",
- "var2bytes",
- "var2str",
- "weakref",
- "yield",
- ),
- prefix=r"(?<!\.)",
- suffix=r"\b",
- ),
- Name.Builtin,
- ),
- (r"((?<!\.)(self|false|true)|(PI|TAU|NAN|INF)" r")\b", Name.Builtin.Pseudo),
- (
- words(
- (
- "bool",
- "int",
- "float",
- "String",
- "NodePath",
- "Vector2",
- "Rect2",
- "Transform2D",
- "Vector3",
- "Rect3",
- "Plane",
- "Quat",
- "Basis",
- "Transform",
- "Color",
- "RID",
- "Object",
- "NodePath",
- "Dictionary",
- "Array",
- "PackedByteArray",
- "PackedInt32Array",
- "PackedInt64Array",
- "PackedFloat32Array",
- "PackedFloat64Array",
- "PackedStringArray",
- "PackedVector2Array",
- "PackedVector3Array",
- "PackedColorArray",
- "null",
+ include("builtins"),
+ (
+ '([rR]|[uUbB][rR]|[rR][uUbB])(""")',
+ bygroups(String.Affix, String.Double),
+ "tdqs",
+ ),
+ (
+ "([rR]|[uUbB][rR]|[rR][uUbB])(''')",
+ bygroups(String.Affix, String.Single),
+ "tsqs",
+ ),
+ (
+ '([rR]|[uUbB][rR]|[rR][uUbB])(")',
+ bygroups(String.Affix, String.Double),
+ "dqs",
+ ),
+ (
+ "([rR]|[uUbB][rR]|[rR][uUbB])(')",
+ bygroups(String.Affix, String.Single),
+ "sqs",
+ ),
+ (
+ '([uUbB]?)(""")',
+ bygroups(String.Affix, String.Double),
+ combined("stringescape", "tdqs"),
+ ),
+ (
+ "([uUbB]?)(''')",
+ bygroups(String.Affix, String.Single),
+ combined("stringescape", "tsqs"),
+ ),
+ (
+ '([uUbB]?)(")',
+ bygroups(String.Affix, String.Double),
+ combined("stringescape", "dqs"),
+ ),
+ (
+ "([uUbB]?)(')",
+ bygroups(String.Affix, String.Single),
+ combined("stringescape", "sqs"),
+ ),
+ include("name"),
+ include("numbers"),
+ ],
+ "keywords": [
+ (
+ words(
+ (
+ "and",
+ "in",
+ "not",
+ "or",
+ "as",
+ "breakpoint",
+ "class",
+ "class_name",
+ "extends",
+ "is",
+ "func",
+ "setget",
+ "signal",
+ "tool",
+ "const",
+ "enum",
+ "export",
+ "onready",
+ "static",
+ "var",
+ "break",
+ "continue",
+ "if",
+ "elif",
+ "else",
+ "for",
+ "pass",
+ "return",
+ "match",
+ "while",
+ "remote",
+ "master",
+ "puppet",
+ "remotesync",
+ "mastersync",
+ "puppetsync",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ ),
+ ],
+ "builtins": [
+ (
+ words(
+ (
+ "Color8",
+ "ColorN",
+ "abs",
+ "acos",
+ "asin",
+ "assert",
+ "atan",
+ "atan2",
+ "bytes2var",
+ "ceil",
+ "char",
+ "clamp",
+ "convert",
+ "cos",
+ "cosh",
+ "db2linear",
+ "decimals",
+ "dectime",
+ "deg2rad",
+ "dict2inst",
+ "ease",
+ "exp",
+ "floor",
+ "fmod",
+ "fposmod",
+ "funcref",
+ "hash",
+ "inst2dict",
+ "instance_from_id",
+ "is_inf",
+ "is_nan",
+ "lerp",
+ "linear2db",
+ "load",
+ "log",
+ "max",
+ "min",
+ "nearest_po2",
+ "pow",
+ "preload",
+ "print",
+ "print_stack",
+ "printerr",
+ "printraw",
+ "prints",
+ "printt",
+ "rad2deg",
+ "rand_range",
+ "rand_seed",
+ "randf",
+ "randi",
+ "randomize",
+ "range",
+ "round",
+ "seed",
+ "sign",
+ "sin",
+ "sinh",
+ "sqrt",
+ "stepify",
+ "str",
+ "str2var",
+ "tan",
+ "tan",
+ "tanh",
+ "type_exist",
+ "typeof",
+ "var2bytes",
+ "var2str",
+ "weakref",
+ "yield",
+ ),
+ prefix=r"(?<!\.)",
+ suffix=r"\b",
+ ),
+ Name.Builtin,
+ ),
+ (r"((?<!\.)(self|false|true)|(PI|TAU|NAN|INF)" r")\b", Name.Builtin.Pseudo),
+ (
+ words(
+ (
+ "bool",
+ "int",
+ "float",
+ "String",
+ "NodePath",
+ "Vector2",
+ "Rect2",
+ "Transform2D",
+ "Vector3",
+ "Rect3",
+ "Plane",
+ "Quat",
+ "Basis",
+ "Transform",
+ "Color",
+ "RID",
+ "Object",
+ "NodePath",
+ "Dictionary",
+ "Array",
+ "PackedByteArray",
+ "PackedInt32Array",
+ "PackedInt64Array",
+ "PackedFloat32Array",
+ "PackedFloat64Array",
+ "PackedStringArray",
+ "PackedVector2Array",
+ "PackedVector3Array",
+ "PackedColorArray",
+ "null",
"void",
- ),
- prefix=r"(?<!\.)",
- suffix=r"\b",
- ),
- Name.Builtin.Type,
- ),
- ],
- "numbers": [
- (r"(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?", Number.Float),
- (r"\d+[eE][+-]?[0-9]+j?", Number.Float),
- (r"0[xX][a-fA-F0-9]+", Number.Hex),
- (r"\d+j?", Number.Integer),
- ],
- "name": [(r"[a-zA-Z_]\w*", Name)],
- "funcname": [(r"[a-zA-Z_]\w*", Name.Function, "#pop"), default("#pop")],
- "classname": [(r"[a-zA-Z_]\w*", Name.Class, "#pop")],
- "stringescape": [
- (
- r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r"U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})",
- String.Escape,
- )
- ],
- "strings-single": innerstring_rules(String.Single),
- "strings-double": innerstring_rules(String.Double),
- "dqs": [
- (r'"', String.Double, "#pop"),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include("strings-double"),
- ],
- "sqs": [
- (r"'", String.Single, "#pop"),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include("strings-single"),
- ],
- "tdqs": [
- (r'"""', String.Double, "#pop"),
- include("strings-double"),
+ ),
+ prefix=r"(?<!\.)",
+ suffix=r"\b",
+ ),
+ Name.Builtin.Type,
+ ),
+ ],
+ "numbers": [
+ (r"(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?", Number.Float),
+ (r"\d+[eE][+-]?[0-9]+j?", Number.Float),
+ (r"0[xX][a-fA-F0-9]+", Number.Hex),
+ (r"\d+j?", Number.Integer),
+ ],
+ "name": [(r"[a-zA-Z_]\w*", Name)],
+ "funcname": [(r"[a-zA-Z_]\w*", Name.Function, "#pop"), default("#pop")],
+ "classname": [(r"[a-zA-Z_]\w*", Name.Class, "#pop")],
+ "stringescape": [
+ (
+ r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r"U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})",
+ String.Escape,
+ )
+ ],
+ "strings-single": innerstring_rules(String.Single),
+ "strings-double": innerstring_rules(String.Double),
+ "dqs": [
+ (r'"', String.Double, "#pop"),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include("strings-double"),
+ ],
+ "sqs": [
+ (r"'", String.Single, "#pop"),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include("strings-single"),
+ ],
+ "tdqs": [
+ (r'"""', String.Double, "#pop"),
+ include("strings-double"),
(r"\n", Whitespace),
- ],
- "tsqs": [
- (r"'''", String.Single, "#pop"),
- include("strings-single"),
+ ],
+ "tsqs": [
+ (r"'''", String.Single, "#pop"),
+ include("strings-single"),
(r"\n", Whitespace),
- ],
- }
-
- def analyse_text(text):
- score = 0.0
-
- if re.search(
- r"func (_ready|_init|_input|_process|_unhandled_input)", text
- ):
- score += 0.8
-
- if re.search(
- r"(extends |class_name |onready |preload|load|setget|func [^_])",
- text
- ):
- score += 0.4
-
- if re.search(r"(var|const|enum|export|signal|tool)", text):
- score += 0.2
-
- return min(score, 1.0)
+ ],
+ }
+
+ def analyse_text(text):
+ score = 0.0
+
+ if re.search(
+ r"func (_ready|_init|_input|_process|_unhandled_input)", text
+ ):
+ score += 0.8
+
+ if re.search(
+ r"(extends |class_name |onready |preload|load|setget|func [^_])",
+ text
+ ):
+ score += 0.4
+
+ if re.search(r"(var|const|enum|export|signal|tool)", text):
+ score += 0.2
+
+ return min(score, 1.0)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/go.py b/contrib/python/Pygments/py3/pygments/lexers/go.py
index 33a57ce813..04324662b9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/go.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/go.py
@@ -4,7 +4,7 @@
Lexers for the Google Go language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class GoLexer(RegexLexer):
"""
name = 'Go'
filenames = ['*.go']
- aliases = ['go', 'golang']
+ aliases = ['go', 'golang']
mimetypes = ['text/x-gosrc']
flags = re.MULTILINE | re.UNICODE
@@ -89,7 +89,7 @@ class GoLexer(RegexLexer):
# -- raw_string_lit
(r'`[^`]*`', String),
# -- interpreted_string_lit
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Tokens
(r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py b/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
index ff57c99917..f62898d7c4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
@@ -2,9 +2,9 @@
pygments.lexers.grammar_notation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- Lexers for grammar notations like BNF.
+ Lexers for grammar notations like BNF.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,12 +14,12 @@ from pygments.lexer import RegexLexer, bygroups, include, this, using, words
from pygments.token import Comment, Keyword, Literal, Name, Number, \
Operator, Punctuation, String, Text, Whitespace
-__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer', 'PegLexer']
+__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer', 'PegLexer']
class BnfLexer(RegexLexer):
"""
- This lexer is for grammar notations which are similar to
+ This lexer is for grammar notations which are similar to
original BNF.
In order to maximize a number of targets of this lexer,
@@ -210,60 +210,60 @@ class JsgfLexer(RegexLexer):
(r'.', Comment.Multiline),
],
}
-
-
-class PegLexer(RegexLexer):
- """
- This lexer is for `Parsing Expression Grammars
- <https://bford.info/pub/lang/peg.pdf>`_ (PEG).
-
- Various implementations of PEG have made different decisions
- regarding the syntax, so let's try to be accommodating:
-
- * `<-`, `←`, `:`, and `=` are all accepted as rule operators.
-
- * Both `|` and `/` are choice operators.
-
- * `^`, `↑`, and `~` are cut operators.
-
- * A single `a-z` character immediately before a string, or
- multiple `a-z` characters following a string, are part of the
- string (e.g., `r"..."` or `"..."ilmsuxa`).
-
- .. versionadded:: 2.6
- """
-
- name = 'PEG'
- aliases = ['peg']
- filenames = ['*.peg']
- mimetypes = ['text/x-peg']
-
- tokens = {
- 'root': [
- # Comments
+
+
+class PegLexer(RegexLexer):
+ """
+ This lexer is for `Parsing Expression Grammars
+ <https://bford.info/pub/lang/peg.pdf>`_ (PEG).
+
+ Various implementations of PEG have made different decisions
+ regarding the syntax, so let's try to be accommodating:
+
+ * `<-`, `←`, `:`, and `=` are all accepted as rule operators.
+
+ * Both `|` and `/` are choice operators.
+
+ * `^`, `↑`, and `~` are cut operators.
+
+ * A single `a-z` character immediately before a string, or
+ multiple `a-z` characters following a string, are part of the
+ string (e.g., `r"..."` or `"..."ilmsuxa`).
+
+ .. versionadded:: 2.6
+ """
+
+ name = 'PEG'
+ aliases = ['peg']
+ filenames = ['*.peg']
+ mimetypes = ['text/x-peg']
+
+ tokens = {
+ 'root': [
+ # Comments
(r'#.*$', Comment.Single),
-
- # All operators
- (r'<-|[←:=/|&!?*+^↑~]', Operator),
-
- # Other punctuation
- (r'[()]', Punctuation),
-
- # Keywords
- (r'\.', Keyword),
-
- # Character classes
- (r'(\[)([^\]]*(?:\\.[^\]\\]*)*)(\])',
- bygroups(Punctuation, String, Punctuation)),
-
- # Single and double quoted strings (with optional modifiers)
- (r'[a-z]?"[^"\\]*(?:\\.[^"\\]*)*"[a-z]*', String.Double),
- (r"[a-z]?'[^'\\]*(?:\\.[^'\\]*)*'[a-z]*", String.Single),
-
- # Nonterminals are not whitespace, operators, or punctuation
- (r'[^\s<←:=/|&!?*+\^↑~()\[\]"\'#]+', Name.Class),
-
- # Fallback
- (r'.', Text),
- ],
- }
+
+ # All operators
+ (r'<-|[←:=/|&!?*+^↑~]', Operator),
+
+ # Other punctuation
+ (r'[()]', Punctuation),
+
+ # Keywords
+ (r'\.', Keyword),
+
+ # Character classes
+ (r'(\[)([^\]]*(?:\\.[^\]\\]*)*)(\])',
+ bygroups(Punctuation, String, Punctuation)),
+
+ # Single and double quoted strings (with optional modifiers)
+ (r'[a-z]?"[^"\\]*(?:\\.[^"\\]*)*"[a-z]*', String.Double),
+ (r"[a-z]?'[^'\\]*(?:\\.[^'\\]*)*'[a-z]*", String.Single),
+
+ # Nonterminals are not whitespace, operators, or punctuation
+ (r'[^\s<←:=/|&!?*+\^↑~()\[\]"\'#]+', Name.Class),
+
+ # Fallback
+ (r'.', Text),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graph.py b/contrib/python/Pygments/py3/pygments/lexers/graph.py
index 2af56af26b..acb82ee92f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graph.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graph.py
@@ -4,7 +4,7 @@
Lexers for graph query languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graphics.py b/contrib/python/Pygments/py3/pygments/lexers/graphics.py
index 9f3e4a4431..7e750c2868 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graphics.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graphics.py
@@ -4,7 +4,7 @@
Lexers for computer graphics and plotting related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -407,7 +407,7 @@ class AsymptoteLexer(RegexLexer):
.. versionadded:: 1.2
"""
name = 'Asymptote'
- aliases = ['asymptote', 'asy']
+ aliases = ['asymptote', 'asy']
filenames = ['*.asy']
mimetypes = ['text/x-asymptote']
@@ -424,7 +424,7 @@ class AsymptoteLexer(RegexLexer):
],
'statements': [
# simple string (TeX friendly)
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# C style string (with character escapes)
(r"'", String, 'string'),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
@@ -774,26 +774,26 @@ class PovrayLexer(RegexLexer):
(r'[0-9]+\.[0-9]*', Number.Float),
(r'\.[0-9]+', Number.Float),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r'\s+', Whitespace),
]
}
-
- def analyse_text(text):
- """POVRAY is similar to JSON/C, but the combination of camera and
- light_source is probably not very likely elsewhere. HLSL or GLSL
- are similar (GLSL even has #version), but they miss #declare, and
- light_source/camera are not keywords anywhere else -- it's fair
- to assume though that any POVRAY scene must have a camera and
- lightsource."""
- result = 0
- if '#version' in text:
- result += 0.05
- if '#declare' in text:
- result += 0.05
- if 'camera' in text:
- result += 0.05
- if 'light_source' in text:
- result += 0.1
-
- return result
+
+ def analyse_text(text):
+ """POVRAY is similar to JSON/C, but the combination of camera and
+ light_source is probably not very likely elsewhere. HLSL or GLSL
+ are similar (GLSL even has #version), but they miss #declare, and
+ light_source/camera are not keywords anywhere else -- it's fair
+ to assume though that any POVRAY scene must have a camera and
+ lightsource."""
+ result = 0
+ if '#version' in text:
+ result += 0.05
+ if '#declare' in text:
+ result += 0.05
+ if 'camera' in text:
+ result += 0.05
+ if 'light_source' in text:
+ result += 0.1
+
+ return result
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graphviz.py b/contrib/python/Pygments/py3/pygments/lexers/graphviz.py
index a685933744..da16ad193c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graphviz.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graphviz.py
@@ -1,58 +1,58 @@
-"""
- pygments.lexers.graphviz
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the DOT language (graphviz).
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Keyword, Operator, Name, String, Number, \
- Punctuation, Whitespace
-
-
-__all__ = ['GraphvizLexer']
-
-
-class GraphvizLexer(RegexLexer):
- """
- For graphviz DOT graph description language.
-
- .. versionadded:: 2.8
- """
- name = 'Graphviz'
- aliases = ['graphviz', 'dot']
- filenames = ['*.gv', '*.dot']
- mimetypes = ['text/x-graphviz', 'text/vnd.graphviz']
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'(#|//).*?$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(?i)(node|edge|graph|digraph|subgraph|strict)\b', Keyword),
- (r'--|->', Operator),
- (r'[{}[\]:;,]', Punctuation),
- (r'(\b\D\w*)(\s*)(=)(\s*)',
- bygroups(Name.Attribute, Whitespace, Punctuation, Whitespace),
- 'attr_id'),
- (r'\b(n|ne|e|se|s|sw|w|nw|c|_)\b', Name.Builtin),
- (r'\b\D\w*', Name.Tag), # node
- (r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number),
- (r'"(\\"|[^"])*?"', Name.Tag), # quoted node
- (r'<', Punctuation, 'xml'),
- ],
- 'attr_id': [
- (r'\b\D\w*', String, '#pop'),
- (r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number, '#pop'),
- (r'"(\\"|[^"])*?"', String.Double, '#pop'),
- (r'<', Punctuation, ('#pop', 'xml')),
- ],
- 'xml': [
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
- (r'\s+', Whitespace),
- (r'[^<>\s]', Name.Tag),
- ]
- }
+"""
+ pygments.lexers.graphviz
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the DOT language (graphviz).
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Keyword, Operator, Name, String, Number, \
+ Punctuation, Whitespace
+
+
+__all__ = ['GraphvizLexer']
+
+
+class GraphvizLexer(RegexLexer):
+ """
+ For graphviz DOT graph description language.
+
+ .. versionadded:: 2.8
+ """
+ name = 'Graphviz'
+ aliases = ['graphviz', 'dot']
+ filenames = ['*.gv', '*.dot']
+ mimetypes = ['text/x-graphviz', 'text/vnd.graphviz']
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'(#|//).*?$', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'(?i)(node|edge|graph|digraph|subgraph|strict)\b', Keyword),
+ (r'--|->', Operator),
+ (r'[{}[\]:;,]', Punctuation),
+ (r'(\b\D\w*)(\s*)(=)(\s*)',
+ bygroups(Name.Attribute, Whitespace, Punctuation, Whitespace),
+ 'attr_id'),
+ (r'\b(n|ne|e|se|s|sw|w|nw|c|_)\b', Name.Builtin),
+ (r'\b\D\w*', Name.Tag), # node
+ (r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number),
+ (r'"(\\"|[^"])*?"', Name.Tag), # quoted node
+ (r'<', Punctuation, 'xml'),
+ ],
+ 'attr_id': [
+ (r'\b\D\w*', String, '#pop'),
+ (r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number, '#pop'),
+ (r'"(\\"|[^"])*?"', String.Double, '#pop'),
+ (r'<', Punctuation, ('#pop', 'xml')),
+ ],
+ 'xml': [
+ (r'<', Punctuation, '#push'),
+ (r'>', Punctuation, '#pop'),
+ (r'\s+', Whitespace),
+ (r'[^<>\s]', Name.Tag),
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/gsql.py b/contrib/python/Pygments/py3/pygments/lexers/gsql.py
index 3dd3ea1a6f..2c66d8c686 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/gsql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/gsql.py
@@ -1,53 +1,53 @@
-"""
- pygments.lexers.gsql
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for TigerGraph GSQL graph query language
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words
-from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
- String, Number, Whitespace, Token
-
-
-__all__ = ["GSQLLexer"]
-
-class GSQLLexer(RegexLexer):
-
- """
- For `GSQL <https://docs.tigergraph.com/dev/gsql-ref>`_ queries (version 3.x).
- .. versionadded:: 2.10
- """
-
- name = 'GSQL'
- aliases = ['gsql']
- filenames = ['*.gsql']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- include('comment'),
- include('keywords'),
- include('clauses'),
- include('accums'),
- include('relations'),
- include('strings'),
- include('whitespace'),
- include('barewords'),
- include('operators'),
- ],
- 'comment': [
+"""
+ pygments.lexers.gsql
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for TigerGraph GSQL graph query language
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, words
+from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
+ String, Number, Whitespace, Token
+
+
+__all__ = ["GSQLLexer"]
+
+class GSQLLexer(RegexLexer):
+
+ """
+ For `GSQL <https://docs.tigergraph.com/dev/gsql-ref>`_ queries (version 3.x).
+ .. versionadded:: 2.10
+ """
+
+ name = 'GSQL'
+ aliases = ['gsql']
+ filenames = ['*.gsql']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('keywords'),
+ include('clauses'),
+ include('accums'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ include('operators'),
+ ],
+ 'comment': [
(r'\#.*', Comment.Single),
(r'/\*(.|\n)*?\*/', Comment.Multiline),
- ],
- 'keywords': [
- (words((
+ ],
+ 'keywords': [
+ (words((
'ACCUM', 'AND', 'ANY', 'API', 'AS', 'ASC', 'AVG', 'BAG', 'BATCH', 'BETWEEN', 'BOOL', 'BOTH',
'BREAK', 'BY', 'CASE', 'CATCH', 'COALESCE', 'COMPRESS', 'CONTINUE', 'COUNT',
'CREATE', 'DATETIME', 'DATETIME_ADD', 'DATETIME_SUB', 'DELETE', 'DESC', 'DISTRIBUTED', 'DO',
@@ -58,34 +58,34 @@ class GSQLLexer(RegexLexer):
'NOW', 'NULL', 'OFFSET', 'OR', 'ORDER', 'PATH', 'PER', 'PINNED', 'POST_ACCUM', 'POST-ACCUM', 'PRIMARY_ID', 'PRINT',
'QUERY', 'RAISE', 'RANGE', 'REPLACE', 'RESET_COLLECTION_ACCUM', 'RETURN', 'RETURNS', 'RUN', 'SAMPLE', 'SELECT', 'SELECT_VERTEX',
'SET', 'SRC', 'STATIC', 'STRING', 'SUM', 'SYNTAX', 'TARGET', 'TAGSTGT', 'THEN', 'TO', 'TO_CSV', 'TO_DATETIME', 'TRAILING', 'TRIM', 'TRUE',
- 'TRY', 'TUPLE', 'TYPEDEF', 'UINT', 'UNION', 'UPDATE', 'VALUES', 'VERTEX', 'WHEN', 'WHERE', 'WHILE', 'WITH'), prefix=r'(?<!\.)', suffix=r'\b'), Token.Keyword)
- ],
- 'clauses': [
- (words(('accum', 'having', 'limit', 'order', 'postAccum', 'sample', 'where')), Name.Builtin)
- ],
- 'accums': [
+ 'TRY', 'TUPLE', 'TYPEDEF', 'UINT', 'UNION', 'UPDATE', 'VALUES', 'VERTEX', 'WHEN', 'WHERE', 'WHILE', 'WITH'), prefix=r'(?<!\.)', suffix=r'\b'), Token.Keyword)
+ ],
+ 'clauses': [
+ (words(('accum', 'having', 'limit', 'order', 'postAccum', 'sample', 'where')), Name.Builtin)
+ ],
+ 'accums': [
(words(('andaccum', 'arrayaccum', 'avgaccum', 'bagaccum', 'bitwiseandaccum',
'bitwiseoraccum', 'groupbyaccum', 'heapaccum', 'listaccum', 'MapAccum',
- 'maxaccum', 'minaccum', 'oraccum', 'setaccum', 'sumaccum')), Name.Builtin),
- ],
- 'relations': [
- (r'(-\s?)(\(.*\:\w?\))(\s?-)', bygroups(Operator, using(this), Operator)),
- (r'->|<-', Operator),
+ 'maxaccum', 'minaccum', 'oraccum', 'setaccum', 'sumaccum')), Name.Builtin),
+ ],
+ 'relations': [
+ (r'(-\s?)(\(.*\:\w?\))(\s?-)', bygroups(Operator, using(this), Operator)),
+ (r'->|<-', Operator),
(r'[.*{}\[\]\<\>\_]', Punctuation),
- ],
- 'strings': [
+ ],
+ 'strings': [
(r'"([^"\\]|\\.)*"', String),
- (r'@{1,2}\w+', Name.Variable),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'barewords': [
- (r'[a-z]\w*', Name),
- (r'(\d+\.\d+|\d+)', Number),
- ],
- 'operators': [
+ (r'@{1,2}\w+', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z]\w*', Name),
+ (r'(\d+\.\d+|\d+)', Number),
+ ],
+ 'operators': [
(r'\$|[^0-9|\/|\-](\-\=|\+\=|\*\=|\\\=|\=|\=\=|\=\=\=|\+|\-|\*|\\|\+\=|\>|\<)[^\>|\/]', Operator),
(r'(\||\(|\)|\,|\;|\=|\-|\+|\*|\/|\>|\<|\:)', Operator),
- ],
- }
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/haskell.py b/contrib/python/Pygments/py3/pygments/lexers/haskell.py
index 6ab0f3340e..38e2f732bc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/haskell.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/haskell.py
@@ -4,7 +4,7 @@
Lexers for Haskell and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -65,7 +65,7 @@ class HaskellLexer(RegexLexer):
(r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
(r"(')\([^)]*\)", Keyword.Type), # ..
- (r"(')[:!#$%&*+.\\/<=>?@^|~-]+", Keyword.Type), # promoted type operators
+ (r"(')[:!#$%&*+.\\/<=>?@^|~-]+", Keyword.Type), # promoted type operators
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
@@ -197,7 +197,7 @@ class IdrisLexer(RegexLexer):
'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
'total', 'partial',
- 'interface', 'implementation', 'export', 'covering', 'constructor',
+ 'interface', 'implementation', 'export', 'covering', 'constructor',
'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
'pattern', 'term', 'syntax', 'prefix',
'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
@@ -214,7 +214,7 @@ class IdrisLexer(RegexLexer):
tokens = {
'root': [
# Comments
- (r'^(\s*)(%%(%s))' % '|'.join(directives),
+ (r'^(\s*)(%%(%s))' % '|'.join(directives),
bygroups(Whitespace, Keyword.Reserved)),
(r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Whitespace, Comment.Single)),
(r'(\s*)(\|{3}.*?)$', bygroups(Whitespace, Comment.Single)),
@@ -326,10 +326,10 @@ class AgdaLexer(RegexLexer):
# Identifiers
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace), 'module'),
- (r'\b(Set|Prop)[\u2080-\u2089]*\b', Keyword.Type),
+ (r'\b(Set|Prop)[\u2080-\u2089]*\b', Keyword.Type),
# Special Symbols
(r'(\(|\)|\{|\})', Operator),
- (r'(\.{1,3}|\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
+ (r'(\.{1,3}|\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
# Numbers
(r'\d+[eE][+-]?\d+', Number.Float),
(r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
@@ -482,10 +482,10 @@ class CryptolLexer(RegexLexer):
],
}
- EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
- 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
- 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
- 'trace'}
+ EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
+ 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
+ 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
+ 'trace'}
def get_tokens_unprocessed(self, text):
stack = ['root']
@@ -558,7 +558,7 @@ class LiterateLexer(Lexer):
latex += line
insertions.append((len(code),
list(lxlexer.get_tokens_unprocessed(latex))))
- yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
+ yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
class LiterateHaskellLexer(LiterateLexer):
@@ -575,7 +575,7 @@ class LiterateHaskellLexer(LiterateLexer):
.. versionadded:: 0.9
"""
name = 'Literate Haskell'
- aliases = ['literate-haskell', 'lhaskell', 'lhs']
+ aliases = ['literate-haskell', 'lhaskell', 'lhs']
filenames = ['*.lhs']
mimetypes = ['text/x-literate-haskell']
@@ -598,7 +598,7 @@ class LiterateIdrisLexer(LiterateLexer):
.. versionadded:: 2.0
"""
name = 'Literate Idris'
- aliases = ['literate-idris', 'lidris', 'lidr']
+ aliases = ['literate-idris', 'lidris', 'lidr']
filenames = ['*.lidr']
mimetypes = ['text/x-literate-idris']
@@ -621,7 +621,7 @@ class LiterateAgdaLexer(LiterateLexer):
.. versionadded:: 2.0
"""
name = 'Literate Agda'
- aliases = ['literate-agda', 'lagda']
+ aliases = ['literate-agda', 'lagda']
filenames = ['*.lagda']
mimetypes = ['text/x-literate-agda']
@@ -644,7 +644,7 @@ class LiterateCryptolLexer(LiterateLexer):
.. versionadded:: 2.0
"""
name = 'Literate Cryptol'
- aliases = ['literate-cryptol', 'lcryptol', 'lcry']
+ aliases = ['literate-cryptol', 'lcryptol', 'lcry']
filenames = ['*.lcry']
mimetypes = ['text/x-literate-cryptol']
diff --git a/contrib/python/Pygments/py3/pygments/lexers/haxe.py b/contrib/python/Pygments/py3/pygments/lexers/haxe.py
index ee587e99b7..270430cc4d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/haxe.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/haxe.py
@@ -4,7 +4,7 @@
Lexers for Haxe and related stuff.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,7 +26,7 @@ class HaxeLexer(ExtendedRegexLexer):
"""
name = 'Haxe'
- aliases = ['haxe', 'hxsl', 'hx']
+ aliases = ['haxe', 'hxsl', 'hx']
filenames = ['*.hx', '*.hxsl']
mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
@@ -78,7 +78,7 @@ class HaxeLexer(ExtendedRegexLexer):
if proc in ['error']:
ctx.stack.append('preproc-error')
- yield match.start(), Comment.Preproc, '#' + proc
+ yield match.start(), Comment.Preproc, '#' + proc
ctx.pos = match.end()
tokens = {
@@ -466,7 +466,7 @@ class HaxeLexer(ExtendedRegexLexer):
(r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
# EReg
- (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
+ (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
# Array
(r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
@@ -721,7 +721,7 @@ class HaxeLexer(ExtendedRegexLexer):
(r'"', String.Double, ('#pop', 'string-double')),
# EReg
- (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex, '#pop'),
+ (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex, '#pop'),
# Array
(r'\[', Operator, ('#pop', 'array-decl')),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/hdl.py b/contrib/python/Pygments/py3/pygments/lexers/hdl.py
index e96f79a475..32e87bd828 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/hdl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/hdl.py
@@ -4,7 +4,7 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -102,12 +102,12 @@ class VerilogLexer(RegexLexer):
(words((
'byte', 'shortint', 'int', 'longint', 'integer', 'time',
'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
- 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor'
+ 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor'
'shortreal', 'real', 'realtime'), suffix=r'\b'),
Keyword.Type),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'\$?[a-zA-Z_]\w*', Name),
- (r'\\(\S+)', Name),
+ (r'\\(\S+)', Name),
],
'string': [
(r'"', String, '#pop'),
@@ -129,20 +129,20 @@ class VerilogLexer(RegexLexer):
]
}
- def analyse_text(text):
- """Verilog code will use one of reg/wire/assign for sure, and that
- is not common elsewhere."""
- result = 0
- if 'reg' in text:
- result += 0.1
- if 'wire' in text:
- result += 0.1
- if 'assign' in text:
- result += 0.1
-
- return result
-
-
+ def analyse_text(text):
+ """Verilog code will use one of reg/wire/assign for sure, and that
+ is not common elsewhere."""
+ result = 0
+ if 'reg' in text:
+ result += 0.1
+ if 'wire' in text:
+ result += 0.1
+ if 'assign' in text:
+ result += 0.1
+
+ return result
+
+
class SystemVerilogLexer(RegexLexer):
"""
Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
@@ -171,183 +171,183 @@ class SystemVerilogLexer(RegexLexer):
(r'[{}#@]', Punctuation),
(r'L?"', String, 'string'),
(r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-
+
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-
- (r'([1-9][_0-9]*)?\s*\'[sS]?[bB]\s*[xXzZ?01][_xXzZ?01]*',
- Number.Bin),
- (r'([1-9][_0-9]*)?\s*\'[sS]?[oO]\s*[xXzZ?0-7][_xXzZ?0-7]*',
- Number.Oct),
- (r'([1-9][_0-9]*)?\s*\'[sS]?[dD]\s*[xXzZ?0-9][_xXzZ?0-9]*',
- Number.Integer),
- (r'([1-9][_0-9]*)?\s*\'[sS]?[hH]\s*[xXzZ?0-9a-fA-F][_xXzZ?0-9a-fA-F]*',
- Number.Hex),
-
- (r'\'[01xXzZ]', Number),
- (r'[0-9][_0-9]*', Number.Integer),
-
+
+ (r'([1-9][_0-9]*)?\s*\'[sS]?[bB]\s*[xXzZ?01][_xXzZ?01]*',
+ Number.Bin),
+ (r'([1-9][_0-9]*)?\s*\'[sS]?[oO]\s*[xXzZ?0-7][_xXzZ?0-7]*',
+ Number.Oct),
+ (r'([1-9][_0-9]*)?\s*\'[sS]?[dD]\s*[xXzZ?0-9][_xXzZ?0-9]*',
+ Number.Integer),
+ (r'([1-9][_0-9]*)?\s*\'[sS]?[hH]\s*[xXzZ?0-9a-fA-F][_xXzZ?0-9a-fA-F]*',
+ Number.Hex),
+
+ (r'\'[01xXzZ]', Number),
+ (r'[0-9][_0-9]*', Number.Integer),
+
(r'[~!%^&*+=|?:<>/-]', Operator),
- (words(('inside', 'dist'), suffix=r'\b'), Operator.Word),
-
- (r'[()\[\],.;\'$]', Punctuation),
+ (words(('inside', 'dist'), suffix=r'\b'), Operator.Word),
+
+ (r'[()\[\],.;\'$]', Punctuation),
(r'`[a-zA-Z_]\w*', Name.Constant),
(words((
- 'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
- 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
- 'before', 'begin', 'bind', 'bins', 'binsof', 'break', 'buf',
- 'bufif0', 'bufif1', 'case', 'casex', 'casez', 'cell',
- 'checker', 'clocking', 'cmos', 'config',
- 'constraint', 'context', 'continue', 'cover', 'covergroup',
- 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design',
- 'disable', 'do', 'edge', 'else', 'end', 'endcase',
- 'endchecker', 'endclocking', 'endconfig', 'endfunction',
- 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage',
- 'endprimitive', 'endprogram', 'endproperty', 'endsequence',
- 'endspecify', 'endtable', 'endtask', 'enum', 'eventually',
- 'expect', 'export', 'extern', 'final', 'first_match',
- 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function',
- 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff',
- 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'implements', 'import',
- 'incdir', 'include', 'initial', 'inout', 'input',
- 'instance', 'interconnect', 'interface', 'intersect', 'join',
- 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
- 'local', 'localparam', 'macromodule', 'matches',
- 'medium', 'modport', 'module', 'nand', 'negedge', 'nettype', 'new', 'nexttime',
- 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
- 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
- 'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
- 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect',
- 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase',
- 'randsequence', 'rcmos', 'ref',
- 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
- 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually',
- 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence',
- 'showcancelled', 'small', 'soft', 'solve',
- 'specify', 'specparam', 'static', 'strong', 'strong0',
- 'strong1', 'struct', 'super', 'sync_accept_on',
- 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
- 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1',
- 'typedef', 'union', 'unique', 'unique0', 'until',
- 'until_with', 'untyped', 'use', 'vectored',
- 'virtual', 'wait', 'wait_order', 'weak', 'weak0',
- 'weak1', 'while', 'wildcard', 'with', 'within',
- 'xnor', 'xor'),
- suffix=r'\b'),
+ 'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
+ 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
+ 'before', 'begin', 'bind', 'bins', 'binsof', 'break', 'buf',
+ 'bufif0', 'bufif1', 'case', 'casex', 'casez', 'cell',
+ 'checker', 'clocking', 'cmos', 'config',
+ 'constraint', 'context', 'continue', 'cover', 'covergroup',
+ 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design',
+ 'disable', 'do', 'edge', 'else', 'end', 'endcase',
+ 'endchecker', 'endclocking', 'endconfig', 'endfunction',
+ 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage',
+ 'endprimitive', 'endprogram', 'endproperty', 'endsequence',
+ 'endspecify', 'endtable', 'endtask', 'enum', 'eventually',
+ 'expect', 'export', 'extern', 'final', 'first_match',
+ 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function',
+ 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff',
+ 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'implements', 'import',
+ 'incdir', 'include', 'initial', 'inout', 'input',
+ 'instance', 'interconnect', 'interface', 'intersect', 'join',
+ 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
+ 'local', 'localparam', 'macromodule', 'matches',
+ 'medium', 'modport', 'module', 'nand', 'negedge', 'nettype', 'new', 'nexttime',
+ 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
+ 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
+ 'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
+ 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect',
+ 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase',
+ 'randsequence', 'rcmos', 'ref',
+ 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
+ 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually',
+ 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence',
+ 'showcancelled', 'small', 'soft', 'solve',
+ 'specify', 'specparam', 'static', 'strong', 'strong0',
+ 'strong1', 'struct', 'super', 'sync_accept_on',
+ 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
+ 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1',
+ 'typedef', 'union', 'unique', 'unique0', 'until',
+ 'until_with', 'untyped', 'use', 'vectored',
+ 'virtual', 'wait', 'wait_order', 'weak', 'weak0',
+ 'weak1', 'while', 'wildcard', 'with', 'within',
+ 'xnor', 'xor'),
+ suffix=r'\b'),
Keyword),
- (r'(class)(\s+)([a-zA-Z_]\w*)',
+ (r'(class)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(extends)(\s+)([a-zA-Z_]\w*)',
+ (r'(extends)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?',
+ (r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?',
bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace, Name.Class)),
-
- (words((
- # Variable types
- 'bit', 'byte', 'chandle', 'const', 'event', 'int', 'integer',
- 'logic', 'longint', 'real', 'realtime', 'reg', 'shortint',
- 'shortreal', 'signed', 'string', 'time', 'type', 'unsigned',
- 'var', 'void',
- # Net types
- 'supply0', 'supply1', 'tri', 'triand', 'trior', 'trireg',
- 'tri0', 'tri1', 'uwire', 'wand', 'wire', 'wor'),
- suffix=r'\b'),
- Keyword.Type),
-
+
(words((
- '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
- '`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
- '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
- '`line', '`nounconnected_drive', '`pragma', '`resetall',
- '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
+ # Variable types
+ 'bit', 'byte', 'chandle', 'const', 'event', 'int', 'integer',
+ 'logic', 'longint', 'real', 'realtime', 'reg', 'shortint',
+ 'shortreal', 'signed', 'string', 'time', 'type', 'unsigned',
+ 'var', 'void',
+ # Net types
+ 'supply0', 'supply1', 'tri', 'triand', 'trior', 'trireg',
+ 'tri0', 'tri1', 'uwire', 'wand', 'wire', 'wor'),
+ suffix=r'\b'),
+ Keyword.Type),
+
+ (words((
+ '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
+ '`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
+ '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
+ '`line', '`nounconnected_drive', '`pragma', '`resetall',
+ '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
suffix=r'\b'),
Comment.Preproc),
(words((
- # Simulation control tasks (20.2)
- '$exit', '$finish', '$stop',
- # Simulation time functions (20.3)
- '$realtime', '$stime', '$time',
- # Timescale tasks (20.4)
- '$printtimescale', '$timeformat',
- # Conversion functions
- '$bitstoreal', '$bitstoshortreal', '$cast', '$itor',
- '$realtobits', '$rtoi', '$shortrealtobits', '$signed',
- '$unsigned',
- # Data query functions (20.6)
- '$bits', '$isunbounded', '$typename',
- # Array query functions (20.7)
- '$dimensions', '$high', '$increment', '$left', '$low', '$right',
- '$size', '$unpacked_dimensions',
- # Math functions (20.8)
- '$acos', '$acosh', '$asin', '$asinh', '$atan', '$atan2',
- '$atanh', '$ceil', '$clog2', '$cos', '$cosh', '$exp', '$floor',
- '$hypot', '$ln', '$log10', '$pow', '$sin', '$sinh', '$sqrt',
- '$tan', '$tanh',
- # Bit vector system functions (20.9)
- '$countbits', '$countones', '$isunknown', '$onehot', '$onehot0',
- # Severity tasks (20.10)
- '$info', '$error', '$fatal', '$warning',
- # Assertion control tasks (20.12)
- '$assertcontrol', '$assertfailoff', '$assertfailon',
- '$assertkill', '$assertnonvacuouson', '$assertoff', '$asserton',
- '$assertpassoff', '$assertpasson', '$assertvacuousoff',
- # Sampled value system functions (20.13)
- '$changed', '$changed_gclk', '$changing_gclk', '$falling_gclk',
- '$fell', '$fell_gclk', '$future_gclk', '$past', '$past_gclk',
- '$rising_gclk', '$rose', '$rose_gclk', '$sampled', '$stable',
- '$stable_gclk', '$steady_gclk',
- # Coverage control functions (20.14)
- '$coverage_control', '$coverage_get', '$coverage_get_max',
- '$coverage_merge', '$coverage_save', '$get_coverage',
- '$load_coverage_db', '$set_coverage_db_name',
- # Probabilistic distribution functions (20.15)
- '$dist_chi_square', '$dist_erlang', '$dist_exponential',
- '$dist_normal', '$dist_poisson', '$dist_t', '$dist_uniform',
- '$random',
- # Stochastic analysis tasks and functions (20.16)
- '$q_add', '$q_exam', '$q_full', '$q_initialize', '$q_remove',
- # PLA modeling tasks (20.17)
- '$async$and$array', '$async$and$plane', '$async$nand$array',
- '$async$nand$plane', '$async$nor$array', '$async$nor$plane',
- '$async$or$array', '$async$or$plane', '$sync$and$array',
- '$sync$and$plane', '$sync$nand$array', '$sync$nand$plane',
- '$sync$nor$array', '$sync$nor$plane', '$sync$or$array',
- '$sync$or$plane',
- # Miscellaneous tasks and functions (20.18)
- '$system',
- # Display tasks (21.2)
- '$display', '$displayb', '$displayh', '$displayo', '$monitor',
- '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
- '$monitoron', '$strobe', '$strobeb', '$strobeh', '$strobeo',
- '$write', '$writeb', '$writeh', '$writeo',
- # File I/O tasks and functions (21.3)
- '$fclose', '$fdisplay', '$fdisplayb', '$fdisplayh',
- '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc', '$fgets',
- '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen',
- '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb',
- '$fstrobeh', '$fstrobeo', '$ftell', '$fwrite', '$fwriteb',
- '$fwriteh', '$fwriteo', '$rewind', '$sformat', '$sformatf',
- '$sscanf', '$swrite', '$swriteb', '$swriteh', '$swriteo',
- '$ungetc',
- # Memory load tasks (21.4)
- '$readmemb', '$readmemh',
- # Memory dump tasks (21.5)
- '$writememb', '$writememh',
- # Command line input (21.6)
- '$test$plusargs', '$value$plusargs',
- # VCD tasks (21.7)
- '$dumpall', '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff',
- '$dumpon', '$dumpports', '$dumpportsall', '$dumpportsflush',
- '$dumpportslimit', '$dumpportsoff', '$dumpportson', '$dumpvars',
- ), suffix=r'\b'),
+ # Simulation control tasks (20.2)
+ '$exit', '$finish', '$stop',
+ # Simulation time functions (20.3)
+ '$realtime', '$stime', '$time',
+ # Timescale tasks (20.4)
+ '$printtimescale', '$timeformat',
+ # Conversion functions
+ '$bitstoreal', '$bitstoshortreal', '$cast', '$itor',
+ '$realtobits', '$rtoi', '$shortrealtobits', '$signed',
+ '$unsigned',
+ # Data query functions (20.6)
+ '$bits', '$isunbounded', '$typename',
+ # Array query functions (20.7)
+ '$dimensions', '$high', '$increment', '$left', '$low', '$right',
+ '$size', '$unpacked_dimensions',
+ # Math functions (20.8)
+ '$acos', '$acosh', '$asin', '$asinh', '$atan', '$atan2',
+ '$atanh', '$ceil', '$clog2', '$cos', '$cosh', '$exp', '$floor',
+ '$hypot', '$ln', '$log10', '$pow', '$sin', '$sinh', '$sqrt',
+ '$tan', '$tanh',
+ # Bit vector system functions (20.9)
+ '$countbits', '$countones', '$isunknown', '$onehot', '$onehot0',
+ # Severity tasks (20.10)
+ '$info', '$error', '$fatal', '$warning',
+ # Assertion control tasks (20.12)
+ '$assertcontrol', '$assertfailoff', '$assertfailon',
+ '$assertkill', '$assertnonvacuouson', '$assertoff', '$asserton',
+ '$assertpassoff', '$assertpasson', '$assertvacuousoff',
+ # Sampled value system functions (20.13)
+ '$changed', '$changed_gclk', '$changing_gclk', '$falling_gclk',
+ '$fell', '$fell_gclk', '$future_gclk', '$past', '$past_gclk',
+ '$rising_gclk', '$rose', '$rose_gclk', '$sampled', '$stable',
+ '$stable_gclk', '$steady_gclk',
+ # Coverage control functions (20.14)
+ '$coverage_control', '$coverage_get', '$coverage_get_max',
+ '$coverage_merge', '$coverage_save', '$get_coverage',
+ '$load_coverage_db', '$set_coverage_db_name',
+ # Probabilistic distribution functions (20.15)
+ '$dist_chi_square', '$dist_erlang', '$dist_exponential',
+ '$dist_normal', '$dist_poisson', '$dist_t', '$dist_uniform',
+ '$random',
+ # Stochastic analysis tasks and functions (20.16)
+ '$q_add', '$q_exam', '$q_full', '$q_initialize', '$q_remove',
+ # PLA modeling tasks (20.17)
+ '$async$and$array', '$async$and$plane', '$async$nand$array',
+ '$async$nand$plane', '$async$nor$array', '$async$nor$plane',
+ '$async$or$array', '$async$or$plane', '$sync$and$array',
+ '$sync$and$plane', '$sync$nand$array', '$sync$nand$plane',
+ '$sync$nor$array', '$sync$nor$plane', '$sync$or$array',
+ '$sync$or$plane',
+ # Miscellaneous tasks and functions (20.18)
+ '$system',
+ # Display tasks (21.2)
+ '$display', '$displayb', '$displayh', '$displayo', '$monitor',
+ '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
+ '$monitoron', '$strobe', '$strobeb', '$strobeh', '$strobeo',
+ '$write', '$writeb', '$writeh', '$writeo',
+ # File I/O tasks and functions (21.3)
+ '$fclose', '$fdisplay', '$fdisplayb', '$fdisplayh',
+ '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc', '$fgets',
+ '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen',
+ '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb',
+ '$fstrobeh', '$fstrobeo', '$ftell', '$fwrite', '$fwriteb',
+ '$fwriteh', '$fwriteo', '$rewind', '$sformat', '$sformatf',
+ '$sscanf', '$swrite', '$swriteb', '$swriteh', '$swriteo',
+ '$ungetc',
+ # Memory load tasks (21.4)
+ '$readmemb', '$readmemh',
+ # Memory dump tasks (21.5)
+ '$writememb', '$writememh',
+ # Command line input (21.6)
+ '$test$plusargs', '$value$plusargs',
+ # VCD tasks (21.7)
+ '$dumpall', '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff',
+ '$dumpon', '$dumpports', '$dumpportsall', '$dumpportsflush',
+ '$dumpportslimit', '$dumpportsoff', '$dumpportson', '$dumpvars',
+ ), suffix=r'\b'),
Name.Builtin),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'\$?[a-zA-Z_]\w*', Name),
- (r'\\(\S+)', Name),
+ (r'\\(\S+)', Name),
],
'string': [
(r'"', String, '#pop'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/hexdump.py b/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
index 041d7f6c25..e9185d29b4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
@@ -4,7 +4,7 @@
Lexers for hexadecimal dumps.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/html.py b/contrib/python/Pygments/py3/pygments/lexers/html.py
index 2e29f453cd..db4c8a76a9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/html.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/html.py
@@ -4,7 +4,7 @@
Lexers for HTML, XML and related markup.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -71,24 +71,24 @@ class HtmlLexer(RegexLexer):
bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
Punctuation), '#pop'),
(r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
- # fallback cases for when there is no closing script tag
- # first look for newline and then go back into root state
- # if that fails just read the rest of the file
- # this is similar to the error handling logic in lexer.py
- (r'.+?\n', using(JavascriptLexer), '#pop'),
- (r'.+', using(JavascriptLexer), '#pop'),
+ # fallback cases for when there is no closing script tag
+ # first look for newline and then go back into root state
+ # if that fails just read the rest of the file
+ # this is similar to the error handling logic in lexer.py
+ (r'.+?\n', using(JavascriptLexer), '#pop'),
+ (r'.+', using(JavascriptLexer), '#pop'),
],
'style-content': [
(r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
Punctuation),'#pop'),
(r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
- # fallback cases for when there is no closing style tag
- # first look for newline and then go back into root state
- # if that fails just read the rest of the file
- # this is similar to the error handling logic in lexer.py
- (r'.+?\n', using(CssLexer), '#pop'),
- (r'.+', using(CssLexer), '#pop'),
+ # fallback cases for when there is no closing style tag
+ # first look for newline and then go back into root state
+ # if that fails just read the rest of the file
+ # this is similar to the error handling logic in lexer.py
+ (r'.+?\n', using(CssLexer), '#pop'),
+ (r'.+', using(CssLexer), '#pop'),
],
'attr': [
('".*?"', String, '#pop'),
@@ -245,7 +245,7 @@ class XsltLexer(XmlLexer):
filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
mimetypes = ['application/xsl+xml', 'application/xslt+xml']
- EXTRA_KEYWORDS = {
+ EXTRA_KEYWORDS = {
'apply-imports', 'apply-templates', 'attribute',
'attribute-set', 'call-template', 'choose', 'comment',
'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
@@ -254,7 +254,7 @@ class XsltLexer(XmlLexer):
'preserve-space', 'processing-instruction', 'sort',
'strip-space', 'stylesheet', 'template', 'text', 'transform',
'value-of', 'variable', 'when', 'with-param'
- }
+ }
def get_tokens_unprocessed(self, text):
for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
@@ -358,8 +358,8 @@ class HamlLexer(ExtendedRegexLexer):
(r'\w+', Name.Variable, '#pop'),
(r'@\w+', Name.Variable.Instance, '#pop'),
(r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
+ (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
+ (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
],
'html-comment-block': [
@@ -470,8 +470,8 @@ class ScamlLexer(ExtendedRegexLexer):
(r'\w+', Name.Variable, '#pop'),
(r'@\w+', Name.Variable.Instance, '#pop'),
(r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
+ (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
+ (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
],
'html-comment-block': [
@@ -579,8 +579,8 @@ class PugLexer(ExtendedRegexLexer):
(r'\w+', Name.Variable, '#pop'),
(r'@\w+', Name.Variable.Instance, '#pop'),
(r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
+ (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
+ (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
],
'html-comment-block': [
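The script-content and style-content fallback rules re-indented above exist so that an unterminated <script> or <style> block cannot hang the lexer: the body is handed to the embedded JavaScript or CSS lexer one line at a time before control returns to the root state. A small sketch of that behaviour (the HTML fragment is made up; a regular Pygments installation is assumed):

from pygments.lexers.html import HtmlLexer

# Made-up fragment whose <script> element is never closed -- exactly the
# case the fallback rules in 'script-content' guard against.
fragment = '<p>hi</p>\n<script>\nlet answer = 42;\n'

# Tokenization still terminates; the unterminated script body is fed to
# the embedded JavascriptLexer a line at a time, mirroring the fallback
# comments in the hunk above.
for token, value in HtmlLexer().get_tokens(fragment):
    print(token, repr(value))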
diff --git a/contrib/python/Pygments/py3/pygments/lexers/idl.py b/contrib/python/Pygments/py3/pygments/lexers/idl.py
index 22b8346ac3..3873d3cd76 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/idl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/idl.py
@@ -4,7 +4,7 @@
Lexers for IDL.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,7 +31,7 @@ class IDLLexer(RegexLexer):
_RESERVED = (
'and', 'begin', 'break', 'case', 'common', 'compile_opt',
- 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
+ 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
'endwhile', 'eq', 'for', 'foreach', 'forward_function',
'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
@@ -267,14 +267,14 @@ class IDLLexer(RegexLexer):
(r'.', Text),
]
}
-
- def analyse_text(text):
- """endelse seems to be unique to IDL, endswitch is rare at least."""
- result = 0
-
- if 'endelse' in text:
- result += 0.2
- if 'endswitch' in text:
- result += 0.01
-
-        return result
\ No newline at end of file
+
+ def analyse_text(text):
+ """endelse seems to be unique to IDL, endswitch is rare at least."""
+ result = 0
+
+ if 'endelse' in text:
+ result += 0.2
+ if 'endswitch' in text:
+ result += 0.01
+
+        return result
\ No newline at end of file
diff --git a/contrib/python/Pygments/py3/pygments/lexers/igor.py b/contrib/python/Pygments/py3/pygments/lexers/igor.py
index e843d081f1..a5f6d6e1bd 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/igor.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/igor.py
@@ -4,7 +4,7 @@
Lexers for Igor Pro.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -390,10 +390,10 @@ class IgorLexer(RegexLexer):
'WaveRefIndexedDFR', 'WaveRefsEqual', 'WaveRefWaveToList', 'WaveTextEncoding',
'WaveType', 'WaveUnits', 'WhichListItem', 'WinList', 'WinName', 'WinRecreation',
'WinType', 'wnoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace', 'x2pnt', 'zcsr',
- 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_recv',
- 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_stop',
- 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_send', 'zeromq_set',
- 'zeromq_stop', 'zeromq_test_callfunction', 'zeromq_test_serializeWave', 'zeta'
+ 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_recv',
+ 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_stop',
+ 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_send', 'zeromq_set',
+ 'zeromq_stop', 'zeromq_test_callfunction', 'zeromq_test_serializeWave', 'zeta'
)
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/inferno.py b/contrib/python/Pygments/py3/pygments/lexers/inferno.py
index befe42ab51..3cfa4d8508 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/inferno.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/inferno.py
@@ -4,7 +4,7 @@
Lexers for Inferno os and all the related stuff.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/installers.py b/contrib/python/Pygments/py3/pygments/lexers/installers.py
index 1f7b283146..4a72696c8d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/installers.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/installers.py
@@ -4,7 +4,7 @@
Lexers for installer/packager DSLs and formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -224,7 +224,7 @@ class SourcesListLexer(RegexLexer):
"""
name = 'Debian Sourcelist'
- aliases = ['debsources', 'sourceslist', 'sources.list']
+ aliases = ['debsources', 'sourceslist', 'sources.list']
filenames = ['sources.list']
mimetype = ['application/x-debian-sourceslist']
@@ -274,7 +274,7 @@ class DebianControlLexer(RegexLexer):
.. versionadded:: 0.9
"""
name = 'Debian Control file'
- aliases = ['debcontrol', 'control']
+ aliases = ['debcontrol', 'control']
filenames = ['control']
tokens = {
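The two installer hunks above only restore the alias lists, but those aliases are what get_lexer_by_name() resolves against. A one-line sketch (regular Pygments installation assumed):

from pygments.lexers import get_lexer_by_name

# 'debcontrol' and 'debsources' are among the aliases listed above.
print(get_lexer_by_name('debcontrol'))   # -> DebianControlLexer instance
print(get_lexer_by_name('debsources'))   # -> SourcesListLexer instance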
diff --git a/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py b/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
index c309d19248..c4dcb0129f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
@@ -4,7 +4,7 @@
Lexers for interactive fiction languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -37,10 +37,10 @@ class Inform6Lexer(RegexLexer):
# Inform 7 maps these four character classes to their ASCII
# equivalents. To support Inform 6 inclusions within Inform 7,
# Inform6Lexer maps them too.
- _dash = '\\-\u2010-\u2014'
- _dquote = '"\u201c\u201d'
- _squote = "'\u2018\u2019"
- _newline = '\\n\u0085\u2028\u2029'
+ _dash = '\\-\u2010-\u2014'
+ _dquote = '"\u201c\u201d'
+ _squote = "'\u2018\u2019"
+ _newline = '\\n\u0085\u2028\u2029'
tokens = {
'root': [
@@ -117,7 +117,7 @@ class Inform6Lexer(RegexLexer):
include('_whitespace'),
# Strings
(r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
- (r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote),
+ (r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote),
bygroups(String.Char, String.Escape, String.Char), '#pop'),
(r'([%s])(@.{2})([%s])' % (_squote, _squote),
bygroups(String.Char, String.Escape, String.Char), '#pop'),
@@ -179,7 +179,7 @@ class Inform6Lexer(RegexLexer):
(r'[~^]+', String.Escape),
(r'[^~^\\@({%s]+' % _squote, String.Single),
(r'[({]', String.Single),
- (r'@\{[0-9a-fA-F]*\}', String.Escape),
+ (r'@\{[0-9a-fA-F]*\}', String.Escape),
(r'@.{2}', String.Escape),
(r'[%s]' % _squote, String.Single, '#pop')
],
@@ -190,7 +190,7 @@ class Inform6Lexer(RegexLexer):
(r'\\', String.Escape),
(r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
(_newline, _newline), String.Escape),
- (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F])*'
+ (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F])*'
r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline),
String.Escape),
(r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
@@ -256,8 +256,8 @@ class Inform6Lexer(RegexLexer):
(r'(?i)(extend|verb)\b', Keyword, 'grammar'),
(r'(?i)fake_action\b', Keyword, ('default', '_constant')),
(r'(?i)import\b', Keyword, 'manifest'),
- (r'(?i)(include|link|origsource)\b', Keyword,
- ('default', 'before-plain-string?')),
+ (r'(?i)(include|link|origsource)\b', Keyword,
+ ('default', 'before-plain-string?')),
(r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
(r'(?i)message\b', Keyword, ('default', 'diagnostic')),
(r'(?i)(nearby|object)\b', Keyword,
@@ -364,12 +364,12 @@ class Inform6Lexer(RegexLexer):
'diagnostic': [
include('_whitespace'),
(r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
- default(('#pop', 'before-plain-string?', 'directive-keyword?'))
+ default(('#pop', 'before-plain-string?', 'directive-keyword?'))
],
- 'before-plain-string?': [
+ 'before-plain-string?': [
include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')),
- default('#pop')
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')),
+ default('#pop')
],
'message-string': [
(r'[~^]+', String.Escape),
@@ -386,7 +386,7 @@ class Inform6Lexer(RegexLexer):
'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
'time', 'topic', 'warning', 'with'), suffix=r'\b'),
Keyword, '#pop'),
- (r'static\b', Keyword),
+ (r'static\b', Keyword),
(r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
],
'_directive-keyword': [
@@ -515,16 +515,16 @@ class Inform6Lexer(RegexLexer):
while objectloop_queue:
yield objectloop_queue.pop(0)
- def analyse_text(text):
- """We try to find a keyword which seem relatively common, unfortunately
- there is a decent overlap with Smalltalk keywords otherwise here.."""
- result = 0
- if re.search('\borigsource\b', text, re.IGNORECASE):
- result += 0.05
-
- return result
-
+ def analyse_text(text):
+ """We try to find a keyword which seem relatively common, unfortunately
+ there is a decent overlap with Smalltalk keywords otherwise here.."""
+ result = 0
+ if re.search('\borigsource\b', text, re.IGNORECASE):
+ result += 0.05
+ return result
+
+
class Inform7Lexer(RegexLexer):
"""
For `Inform 7 <http://inform7.com/>`_ source code.
@@ -866,7 +866,7 @@ class Tads3Lexer(RegexLexer):
tokens = {
'root': [
- ('\ufeff', Text),
+ ('\ufeff', Text),
(r'\{', Punctuation, 'object-body'),
(r';+', Punctuation),
(r'(?=(argcount|break|case|catch|continue|default|definingobj|'
@@ -1351,17 +1351,17 @@ class Tads3Lexer(RegexLexer):
else:
token = Comment
yield index, token, value
-
- def analyse_text(text):
- """This is a rather generic descriptive language without strong
- identifiers. It looks like a 'GameMainDef' has to be present,
- and/or a 'versionInfo' with an 'IFID' field."""
- result = 0
- if '__TADS' in text or 'GameMainDef' in text:
- result += 0.2
-
- # This is a fairly unique keyword which is likely used in source as well
- if 'versionInfo' in text and 'IFID' in text:
- result += 0.1
-
- return result
+
+ def analyse_text(text):
+ """This is a rather generic descriptive language without strong
+ identifiers. It looks like a 'GameMainDef' has to be present,
+ and/or a 'versionInfo' with an 'IFID' field."""
+ result = 0
+ if '__TADS' in text or 'GameMainDef' in text:
+ result += 0.2
+
+ # This is a fairly unique keyword which is likely used in source as well
+ if 'versionInfo' in text and 'IFID' in text:
+ result += 0.1
+
+ return result
diff --git a/contrib/python/Pygments/py3/pygments/lexers/iolang.py b/contrib/python/Pygments/py3/pygments/lexers/iolang.py
index c1fbe9084e..a45fe9d62f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/iolang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/iolang.py
@@ -4,7 +4,7 @@
Lexers for the Io language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ class IoLexer(RegexLexer):
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'/\+', Comment.Multiline, 'nestedcomment'),
# DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Operators
(r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
Operator),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/j.py b/contrib/python/Pygments/py3/pygments/lexers/j.py
index 8a3ddcbdd1..8ed7517df8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/j.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/j.py
@@ -4,7 +4,7 @@
Lexer for the J programming language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -88,7 +88,7 @@ class JLexer(RegexLexer):
(r'=[.:]', Operator),
# Builtins
- (r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/?]', Operator),
+ (r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/?]', Operator),
# Short Keywords
(r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/javascript.py b/contrib/python/Pygments/py3/pygments/lexers/javascript.py
index 7ddd1148e6..9ad11dc9fe 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/javascript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/javascript.py
@@ -4,32 +4,32 @@
Lexers for JavaScript and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import bygroups, combined, default, do_insertions, include, \
- inherit, Lexer, RegexLexer, this, using, words
+from pygments.lexer import bygroups, combined, default, do_insertions, include, \
+ inherit, Lexer, RegexLexer, this, using, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other, Generic
-from pygments.util import get_bool_opt
+ Number, Punctuation, Other, Generic
+from pygments.util import get_bool_opt
import pygments.unistring as uni
__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
- 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer',
- 'NodeConsoleLexer']
+ 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer',
+ 'NodeConsoleLexer']
JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
']|\\\\u[a-fA-F0-9]{4})')
JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
'Mn', 'Mc', 'Nd', 'Pc') +
- '\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
+ '\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
-line_re = re.compile('.*?\n')
+line_re = re.compile('.*?\n')
class JavascriptLexer(RegexLexer):
"""
@@ -37,8 +37,8 @@ class JavascriptLexer(RegexLexer):
"""
name = 'JavaScript'
- aliases = ['javascript', 'js']
- filenames = ['*.js', '*.jsm', '*.mjs', '*.cjs']
+ aliases = ['javascript', 'js']
+ filenames = ['*.js', '*.jsm', '*.mjs', '*.cjs']
mimetypes = ['application/javascript', 'application/x-javascript',
'text/x-javascript', 'text/javascript']
@@ -54,7 +54,7 @@ class JavascriptLexer(RegexLexer):
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop')
],
@@ -65,63 +65,63 @@ class JavascriptLexer(RegexLexer):
(r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
-
- # Numeric literals
- (r'0[bB][01]+n?', Number.Bin),
- (r'0[oO]?[0-7]+n?', Number.Oct), # Browsers support "0o7" and "07" (< ES5) notations
- (r'0[xX][0-9a-fA-F]+n?', Number.Hex),
- (r'[0-9]+n', Number.Integer), # Javascript BigInt requires an "n" postfix
- # Javascript doesn't have actual integer literals, so every other
- # numeric literal is handled by the regex below (including "normal")
- # integers
- (r'(\.[0-9]+|[0-9]+\.[0-9]*|[0-9]+)([eE][-+]?[0-9]+)?', Number.Float),
-
+
+ # Numeric literals
+ (r'0[bB][01]+n?', Number.Bin),
+ (r'0[oO]?[0-7]+n?', Number.Oct), # Browsers support "0o7" and "07" (< ES5) notations
+ (r'0[xX][0-9a-fA-F]+n?', Number.Hex),
+ (r'[0-9]+n', Number.Integer), # Javascript BigInt requires an "n" postfix
+ # Javascript doesn't have actual integer literals, so every other
+ # numeric literal is handled by the regex below (including "normal")
+ # integers
+ (r'(\.[0-9]+|[0-9]+\.[0-9]*|[0-9]+)([eE][-+]?[0-9]+)?', Number.Float),
+
(r'\.\.\.|=>', Punctuation),
- (r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
+ (r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
-
- (r'(typeof|instanceof|in|void|delete|new)\b', Operator.Word, 'slashstartsregex'),
-
- # Match stuff like: constructor
- (r'\b(constructor|from|as)\b', Keyword.Reserved),
-
+
+ (r'(typeof|instanceof|in|void|delete|new)\b', Operator.Word, 'slashstartsregex'),
+
+ # Match stuff like: constructor
+ (r'\b(constructor|from|as)\b', Keyword.Reserved),
+
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|yield|await|async|this|of|static|export|'
- r'import|debugger|extends|super)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|const|with|function|class)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(abstract|boolean|byte|char|double|enum|final|float|goto|'
- r'implements|int|interface|long|native|package|private|protected|'
- r'public|short|synchronized|throws|transient|volatile)\b', Keyword.Reserved),
+ r'throw|try|catch|finally|yield|await|async|this|of|static|export|'
+ r'import|debugger|extends|super)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|const|with|function|class)\b', Keyword.Declaration, 'slashstartsregex'),
+
+ (r'(abstract|boolean|byte|char|double|enum|final|float|goto|'
+ r'implements|int|interface|long|native|package|private|protected|'
+ r'public|short|synchronized|throws|transient|volatile)\b', Keyword.Reserved),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
-
- (r'(Array|Boolean|Date|BigInt|Function|Math|ArrayBuffer|'
- r'Number|Object|RegExp|String|Promise|Proxy|decodeURI|'
+
+ (r'(Array|Boolean|Date|BigInt|Function|Math|ArrayBuffer|'
+ r'Number|Object|RegExp|String|Promise|Proxy|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|DataView|'
- r'document|window|globalThis|global|Symbol|Intl|'
- r'WeakSet|WeakMap|Set|Map|Reflect|JSON|Atomics|'
- r'Int(?:8|16|32)Array|BigInt64Array|Float32Array|Float64Array|'
- r'Uint8ClampedArray|Uint(?:8|16|32)Array|BigUint64Array)\b', Name.Builtin),
-
- (r'((?:Eval|Internal|Range|Reference|Syntax|Type|URI)?Error)\b', Name.Exception),
-
- # Match stuff like: super(argument, list)
- (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
- bygroups(Keyword, Text), 'slashstartsregex'),
- # Match stuff like: function() {...}
- (r'([a-zA-Z_?.$][\w?.$]*)(?=\(\) \{)', Name.Other, 'slashstartsregex'),
-
+ r'eval|isFinite|isNaN|parseFloat|parseInt|DataView|'
+ r'document|window|globalThis|global|Symbol|Intl|'
+ r'WeakSet|WeakMap|Set|Map|Reflect|JSON|Atomics|'
+ r'Int(?:8|16|32)Array|BigInt64Array|Float32Array|Float64Array|'
+ r'Uint8ClampedArray|Uint(?:8|16|32)Array|BigUint64Array)\b', Name.Builtin),
+
+ (r'((?:Eval|Internal|Range|Reference|Syntax|Type|URI)?Error)\b', Name.Exception),
+
+ # Match stuff like: super(argument, list)
+ (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
+ bygroups(Keyword, Text), 'slashstartsregex'),
+ # Match stuff like: function() {...}
+ (r'([a-zA-Z_?.$][\w?.$]*)(?=\(\) \{)', Name.Other, 'slashstartsregex'),
+
(JS_IDENT, Name.Other),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'`', String.Backtick, 'interp'),
],
'interp': [
(r'`', String.Backtick, '#pop'),
- (r'\\.', String.Backtick),
+ (r'\\.', String.Backtick),
(r'\$\{', String.Interpol, 'interp-inside'),
(r'\$', String.Backtick),
(r'[^`\\$]+', String.Backtick),
@@ -134,43 +134,43 @@ class JavascriptLexer(RegexLexer):
}
-class TypeScriptLexer(JavascriptLexer):
- """
- For `TypeScript <http://typescriptlang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'TypeScript'
- aliases = ['typescript', 'ts']
- filenames = ['*.ts']
- mimetypes = ['application/x-typescript', 'text/x-typescript']
-
- # Higher priority than the TypoScriptLexer, as TypeScript is far more
- # common these days
- priority = 0.5
-
- tokens = {
- 'root': [
- (r'(abstract|implements|private|protected|public|readonly)\b',
- Keyword, 'slashstartsregex'),
- (r'(enum|interface|override)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'\b(declare|type)\b', Keyword.Reserved),
- # Match variable type keywords
- (r'\b(string|boolean|number)\b', Keyword.Type),
- # Match stuff like: module name {...}
- (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
- bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
- # Match stuff like: (function: return type)
- (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
- bygroups(Name.Other, Text, Keyword.Type)),
- # Match stuff like: Decorators
- (r'@' + JS_IDENT, Keyword.Declaration),
- inherit,
- ],
- }
-
-
+class TypeScriptLexer(JavascriptLexer):
+ """
+ For `TypeScript <http://typescriptlang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'TypeScript'
+ aliases = ['typescript', 'ts']
+ filenames = ['*.ts']
+ mimetypes = ['application/x-typescript', 'text/x-typescript']
+
+ # Higher priority than the TypoScriptLexer, as TypeScript is far more
+ # common these days
+ priority = 0.5
+
+ tokens = {
+ 'root': [
+ (r'(abstract|implements|private|protected|public|readonly)\b',
+ Keyword, 'slashstartsregex'),
+ (r'(enum|interface|override)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'\b(declare|type)\b', Keyword.Reserved),
+ # Match variable type keywords
+ (r'\b(string|boolean|number)\b', Keyword.Type),
+ # Match stuff like: module name {...}
+ (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
+ bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
+ # Match stuff like: (function: return type)
+ (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
+ bygroups(Name.Other, Text, Keyword.Type)),
+ # Match stuff like: Decorators
+ (r'@' + JS_IDENT, Keyword.Declaration),
+ inherit,
+ ],
+ }
+
+
class KalLexer(RegexLexer):
"""
For `Kal`_ source code.
@@ -216,7 +216,7 @@ class KalLexer(RegexLexer):
'root': [
include('commentsandwhitespace'),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex),
+ r'([gimuysd]+\b|\B)', String.Regex),
(r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
Operator),
(r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
@@ -238,11 +238,11 @@ class KalLexer(RegexLexer):
(r'(?<![.$])(true|false|yes|no|on|off|null|nothing|none|'
r'NaN|Infinity|undefined)\b',
Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
+ (r'(Array|Boolean|Date|Error|Function|Math|'
+ r'Number|Object|RegExp|String|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|'
- r'window|globalThis|Symbol|print)\b', Name.Builtin),
+ r'window|globalThis|Symbol|print)\b', Name.Builtin),
(r'[$a-zA-Z_][\w.$]*\s*(:|[+\-*/]?\=)?\b', Name.Variable),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
@@ -290,13 +290,13 @@ class LiveScriptLexer(RegexLexer):
"""
For `LiveScript`_ source code.
- .. _LiveScript: https://livescript.net/
+ .. _LiveScript: https://livescript.net/
.. versionadded:: 1.6
"""
name = 'LiveScript'
- aliases = ['livescript', 'live-script']
+ aliases = ['livescript', 'live-script']
filenames = ['*.ls']
mimetypes = ['text/livescript']
@@ -309,7 +309,7 @@ class LiveScriptLexer(RegexLexer):
],
'multilineregex': [
include('commentsandwhitespace'),
- (r'//([gimuysd]+\b|\B)', String.Regex, '#pop'),
+ (r'//([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'/', String.Regex),
(r'[^/#]+', String.Regex)
],
@@ -317,12 +317,12 @@ class LiveScriptLexer(RegexLexer):
include('commentsandwhitespace'),
(r'//', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'/', Operator, '#pop'),
+ r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
+ (r'/', Operator, '#pop'),
default('#pop'),
],
'root': [
- (r'\A(?=\s|/)', Text, 'slashstartsregex'),
+ (r'\A(?=\s|/)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
@@ -341,11 +341,11 @@ class LiveScriptLexer(RegexLexer):
(r'(?<![.$])(true|false|yes|no|on|off|'
r'null|NaN|Infinity|undefined|void)\b',
Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
+ (r'(Array|Boolean|Date|Error|Function|Math|'
+ r'Number|Object|RegExp|String|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
- r'globalThis|Symbol|Symbol|BigInt)\b', Name.Builtin),
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
+ r'globalThis|Symbol|Symbol|BigInt)\b', Name.Builtin),
(r'[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable,
'slashstartsregex'),
(r'@[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable.Instance,
@@ -399,7 +399,7 @@ class LiveScriptLexer(RegexLexer):
class DartLexer(RegexLexer):
"""
- For `Dart <http://dart.dev/>`_ source code.
+ For `Dart <http://dart.dev/>`_ source code.
.. versionadded:: 1.5
"""
@@ -420,19 +420,19 @@ class DartLexer(RegexLexer):
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'\b(class|extension|mixin)\b(\s+)',
+ (r'\b(class|extension|mixin)\b(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
- (r'\b(as|assert|break|case|catch|const|continue|default|do|else|finally|'
- r'for|if|in|is|new|rethrow|return|super|switch|this|throw|try|while)\b',
+ (r'\b(as|assert|break|case|catch|const|continue|default|do|else|finally|'
+ r'for|if|in|is|new|rethrow|return|super|switch|this|throw|try|while)\b',
Keyword),
- (r'\b(abstract|async|await|const|covariant|extends|external|factory|final|'
- r'get|implements|late|native|on|operator|required|set|static|sync|typedef|'
- r'var|with|yield)\b', Keyword.Declaration),
- (r'\b(bool|double|dynamic|int|num|Function|Never|Null|Object|String|void)\b',
- Keyword.Type),
+ (r'\b(abstract|async|await|const|covariant|extends|external|factory|final|'
+ r'get|implements|late|native|on|operator|required|set|static|sync|typedef|'
+ r'var|with|yield)\b', Keyword.Declaration),
+ (r'\b(bool|double|dynamic|int|num|Function|Never|Null|Object|String|void)\b',
+ Keyword.Type),
(r'\b(false|null|true)\b', Keyword.Constant),
(r'[~!%^&*+=|?:<>/-]|as\b', Operator),
- (r'@[a-zA-Z_$]\w*', Name.Decorator),
+ (r'@[a-zA-Z_$]\w*', Name.Decorator),
(r'[a-zA-Z_$]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[(){}\[\],.;]', Punctuation),
@@ -449,7 +449,7 @@ class DartLexer(RegexLexer):
'import_decl': [
include('string_literal'),
(r'\s+', Text),
- (r'\b(as|deferred|show|hide)\b', Keyword),
+ (r'\b(as|deferred|show|hide)\b', Keyword),
(r'[a-zA-Z_$]\w*', Name),
(r'\,', Punctuation),
(r'\;', Punctuation, '#pop')
@@ -727,9 +727,9 @@ class LassoLexer(RegexLexer):
self._members = set()
if self.builtinshighlighting:
from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
- for key, value in BUILTINS.items():
+ for key, value in BUILTINS.items():
self._builtins.update(value)
- for key, value in MEMBERS.items():
+ for key, value in MEMBERS.items():
self._members.update(value)
RegexLexer.__init__(self, **options)
@@ -828,8 +828,8 @@ class ObjectiveJLexer(RegexLexer):
(r'(L|@)?"', String, 'string'),
(r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
String.Char),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
@@ -866,11 +866,11 @@ class ObjectiveJLexer(RegexLexer):
r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
r'SQRT2)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
+ (r'(Array|Boolean|Date|Error|Function|Math|'
+ r'Number|Object|RegExp|String|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window|globalThis|Symbol)\b', Name.Builtin),
+ r'window|globalThis|Symbol)\b', Name.Builtin),
(r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
bygroups(Name.Function, using(this))),
@@ -990,14 +990,14 @@ class CoffeeScriptLexer(RegexLexer):
"""
name = 'CoffeeScript'
- aliases = ['coffeescript', 'coffee-script', 'coffee']
+ aliases = ['coffeescript', 'coffee-script', 'coffee']
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|'
- r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|\^/])=?')
+ r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|\^/])=?')
flags = re.DOTALL
tokens = {
@@ -1008,7 +1008,7 @@ class CoffeeScriptLexer(RegexLexer):
],
'multilineregex': [
(r'[^/#]+', String.Regex),
- (r'///([gimuysd]+\b|\B)', String.Regex, '#pop'),
+ (r'///([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'#\{', String.Interpol, 'interpoling_string'),
(r'[/#]', String.Regex),
],
@@ -1016,16 +1016,16 @@ class CoffeeScriptLexer(RegexLexer):
include('commentsandwhitespace'),
(r'///', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
# This isn't really guarding against mishighlighting well-formed
# code, just the ability to infinite-loop between root and
# slashstartsregex.
- (r'/', Operator, '#pop'),
+ (r'/', Operator, '#pop'),
default('#pop'),
],
'root': [
include('commentsandwhitespace'),
- (r'\A(?=\s|/)', Text, 'slashstartsregex'),
+ (r'\A(?=\s|/)', Text, 'slashstartsregex'),
(_operator_re, Operator, 'slashstartsregex'),
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
@@ -1038,10 +1038,10 @@ class CoffeeScriptLexer(RegexLexer):
(r'(?<![.$])(true|false|yes|no|on|off|null|'
r'NaN|Infinity|undefined)\b',
Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|'
- r'Number|Object|RegExp|String|decodeURI|'
+ (r'(Array|Boolean|Date|Error|Function|Math|'
+ r'Number|Object|RegExp|String|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|globalThis|Symbol)\b',
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|globalThis|Symbol)\b',
Name.Builtin),
(r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
'slashstartsregex'),
@@ -1095,7 +1095,7 @@ class CoffeeScriptLexer(RegexLexer):
class MaskLexer(RegexLexer):
"""
- For `Mask <https://github.com/atmajs/MaskJS>`__ markup.
+ For `Mask <https://github.com/atmajs/MaskJS>`__ markup.
.. versionadded:: 2.0
"""
@@ -1417,24 +1417,24 @@ class EarlGreyLexer(RegexLexer):
(r'8r[0-7]+', Number.Oct),
(r'2r[01]+', Number.Bin),
(r'16r[a-fA-F0-9]+', Number.Hex),
- (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
- Number.Radix),
+ (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
+ Number.Radix),
(r'\d+', Number.Integer)
],
}
-
+
class JuttleLexer(RegexLexer):
"""
For `Juttle`_ source code.
.. _Juttle: https://github.com/juttle/juttle
- .. versionadded:: 2.2
+ .. versionadded:: 2.2
"""
name = 'Juttle'
- aliases = ['juttle']
+ aliases = ['juttle']
filenames = ['*.juttle']
mimetypes = ['application/juttle', 'application/x-juttle',
'text/x-juttle', 'text/juttle']
@@ -1450,7 +1450,7 @@ class JuttleLexer(RegexLexer):
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop')
],
@@ -1465,90 +1465,90 @@ class JuttleLexer(RegexLexer):
r'(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
(r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?'
r'(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
- (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|'
- r'day|week|month|year)[s]?'
- r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|'
- r'day|week|month|year)[s]?)'
+ (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|'
+ r'day|week|month|year)[s]?'
+ r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|'
+ r'day|week|month|year)[s]?)'
r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
- (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration,
- 'slashstartsregex'),
+ (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration,
+ 'slashstartsregex'),
(r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
- r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b',
- Keyword.Reserved),
+ r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b',
+ Keyword.Reserved),
(r'(true|false|null|Infinity)\b', Keyword.Constant),
- (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b',
- Name.Builtin),
+ (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b',
+ Name.Builtin),
(JS_IDENT, Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
-
-
-class NodeConsoleLexer(Lexer):
- """
- For parsing within an interactive Node.js REPL, such as:
-
- .. sourcecode:: nodejsrepl
-
- > let a = 3
- undefined
- > a
- 3
- > let b = '4'
- undefined
- > b
- '4'
- > b == a
- false
-
- .. versionadded: 2.10
- """
- name = 'Node.js REPL console session'
- aliases = ['nodejsrepl', ]
- mimetypes = ['text/x-nodejsrepl', ]
-
- def get_tokens_unprocessed(self, text):
- jslexer = JavascriptLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:2])]))
-
- curcode += line[2:]
- elif line.startswith('...'):
- # node does a nested ... thing depending on depth
- code = line.lstrip('.')
- lead = len(line) - len(code)
-
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:lead])]))
-
- curcode += code
- else:
- if curcode:
- yield from do_insertions(insertions,
- jslexer.get_tokens_unprocessed(curcode))
-
- curcode = ''
- insertions = []
-
- yield from do_insertions([],
- jslexer.get_tokens_unprocessed(line))
-
- if curcode:
- yield from do_insertions(insertions,
- jslexer.get_tokens_unprocessed(curcode))
+
+
+class NodeConsoleLexer(Lexer):
+ """
+ For parsing within an interactive Node.js REPL, such as:
+
+ .. sourcecode:: nodejsrepl
+
+ > let a = 3
+ undefined
+ > a
+ 3
+ > let b = '4'
+ undefined
+ > b
+ '4'
+ > b == a
+ false
+
+ .. versionadded: 2.10
+ """
+ name = 'Node.js REPL console session'
+ aliases = ['nodejsrepl', ]
+ mimetypes = ['text/x-nodejsrepl', ]
+
+ def get_tokens_unprocessed(self, text):
+ jslexer = JavascriptLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:2])]))
+
+ curcode += line[2:]
+ elif line.startswith('...'):
+ # node does a nested ... thing depending on depth
+ code = line.lstrip('.')
+ lead = len(line) - len(code)
+
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:lead])]))
+
+ curcode += code
+ else:
+ if curcode:
+ yield from do_insertions(insertions,
+ jslexer.get_tokens_unprocessed(curcode))
+
+ curcode = ''
+ insertions = []
+
+ yield from do_insertions([],
+ jslexer.get_tokens_unprocessed(line))
+
+ if curcode:
+ yield from do_insertions(insertions,
+ jslexer.get_tokens_unprocessed(curcode))
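NodeConsoleLexer, added wholesale in the hunk above, splits a REPL transcript on the '> ' and '...' prompts and re-lexes everything else with JavascriptLexer via do_insertions. A short sketch of exercising it (assumes a Pygments build that already ships this lexer, i.e. 2.10 or this vendored copy; the transcript is made up):

from pygments.lexers.javascript import NodeConsoleLexer

# Made-up transcript in the shape shown in the class docstring.
session = "> let a = 3\nundefined\n> a\n3\n"

# '> ' comes out as Generic.Prompt; both the code after the prompt and
# the plain output lines are run through JavascriptLexer, as the else
# branch of get_tokens_unprocessed above shows.
for token, value in NodeConsoleLexer().get_tokens(session):
    print(token, repr(value))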
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jslt.py b/contrib/python/Pygments/py3/pygments/lexers/jslt.py
index f4f14a685b..888642abb0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jslt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jslt.py
@@ -1,94 +1,94 @@
-"""
- pygments.lexers.jslt
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the JSLT language
-
+"""
+ pygments.lexers.jslt
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the JSLT language
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, combined, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Whitespace
-
-
-__all__ = ['JSLTLexer']
-
-
-_WORD_END = r'(?=[^0-9A-Z_a-z-])'
-
-
-class JSLTLexer(RegexLexer):
- """
- For `JSLT <https://github.com/schibsted/jslt>`_ source.
-
- .. versionadded:: 2.10
- """
- name = 'JSLT'
- filenames = ['*.jslt']
- aliases = ['jslt']
- mimetypes = ['text/x-jslt']
-
- tokens = {
- 'root': [
- (r'[\t\n\f\r ]+', Whitespace),
- (r'//.*(\n|\Z)', Comment.Single),
- (r'-?(0|[1-9][0-9]*)', Number.Integer),
- (r'-?(0|[1-9][0-9]*)(.[0-9]+a)?([Ee][+-]?[0-9]+)', Number.Float),
- (r'"([^"\\]|\\.)*"', String.Double),
- (r'[(),:\[\]{}]', Punctuation),
- (r'(!=|[<=>]=?)', Operator),
- (r'[*+/|-]', Operator),
- (r'\.', Operator),
- (words(('import',), suffix=_WORD_END), Keyword.Namespace, combined('import-path', 'whitespace')),
- (words(('as',), suffix=_WORD_END), Keyword.Namespace, combined('import-alias', 'whitespace')),
- (words(('let',), suffix=_WORD_END), Keyword.Declaration, combined('constant', 'whitespace')),
- (words(('def',), suffix=_WORD_END), Keyword.Declaration, combined('function', 'whitespace')),
- (words(('false', 'null', 'true'), suffix=_WORD_END), Keyword.Constant),
- (words(('else', 'for', 'if'), suffix=_WORD_END), Keyword),
- (words(('and', 'or'), suffix=_WORD_END), Operator.Word),
- (words((
- 'all', 'any', 'array', 'boolean', 'capture', 'ceiling',
- 'contains', 'ends-with', 'error', 'flatten', 'floor',
- 'format-time', 'from-json', 'get-key', 'hash-int', 'index-of',
- 'is-array', 'is-boolean', 'is-decimal', 'is-integer',
- 'is-number', 'is-object', 'is-string', 'join', 'lowercase',
- 'max', 'min', 'mod', 'not', 'now', 'number', 'parse-time',
- 'parse-url', 'random', 'replace', 'round', 'sha256-hex', 'size',
- 'split', 'starts-with', 'string', 'sum', 'test', 'to-json',
- 'trim', 'uppercase', 'zip', 'zip-with-index', 'fallback'), suffix=_WORD_END),
- Name.Builtin),
- (r'[A-Z_a-z][0-9A-Z_a-z-]*:[A-Z_a-z][0-9A-Z_a-z-]*', Name.Function),
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name),
- (r'\$[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable),
- ],
- 'constant': [
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable, 'root'),
- ],
- 'function': [
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Function, combined('function-parameter-list', 'whitespace')),
- ],
- 'function-parameter-list': [
- (r'\(', Punctuation, combined('function-parameters', 'whitespace')),
- ],
- 'function-parameters': [
- (r',', Punctuation),
- (r'\)', Punctuation, 'root'),
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable),
- ],
- 'import-path': [
- (r'"([^"]|\\.)*"', String.Symbol, 'root'),
- ],
- 'import-alias': [
- (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Namespace, 'root'),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- ],
- 'whitespace': [
- (r'[\t\n\f\r ]+', Whitespace),
- (r'//.*(\n|\Z)', Comment.Single),
- ]
- }
+"""
+
+from pygments.lexer import RegexLexer, combined, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Whitespace
+
+
+__all__ = ['JSLTLexer']
+
+
+_WORD_END = r'(?=[^0-9A-Z_a-z-])'
+
+
+class JSLTLexer(RegexLexer):
+ """
+ For `JSLT <https://github.com/schibsted/jslt>`_ source.
+
+ .. versionadded:: 2.10
+ """
+ name = 'JSLT'
+ filenames = ['*.jslt']
+ aliases = ['jslt']
+ mimetypes = ['text/x-jslt']
+
+ tokens = {
+ 'root': [
+ (r'[\t\n\f\r ]+', Whitespace),
+ (r'//.*(\n|\Z)', Comment.Single),
+ (r'-?(0|[1-9][0-9]*)', Number.Integer),
+ (r'-?(0|[1-9][0-9]*)(.[0-9]+a)?([Ee][+-]?[0-9]+)', Number.Float),
+ (r'"([^"\\]|\\.)*"', String.Double),
+ (r'[(),:\[\]{}]', Punctuation),
+ (r'(!=|[<=>]=?)', Operator),
+ (r'[*+/|-]', Operator),
+ (r'\.', Operator),
+ (words(('import',), suffix=_WORD_END), Keyword.Namespace, combined('import-path', 'whitespace')),
+ (words(('as',), suffix=_WORD_END), Keyword.Namespace, combined('import-alias', 'whitespace')),
+ (words(('let',), suffix=_WORD_END), Keyword.Declaration, combined('constant', 'whitespace')),
+ (words(('def',), suffix=_WORD_END), Keyword.Declaration, combined('function', 'whitespace')),
+ (words(('false', 'null', 'true'), suffix=_WORD_END), Keyword.Constant),
+ (words(('else', 'for', 'if'), suffix=_WORD_END), Keyword),
+ (words(('and', 'or'), suffix=_WORD_END), Operator.Word),
+ (words((
+ 'all', 'any', 'array', 'boolean', 'capture', 'ceiling',
+ 'contains', 'ends-with', 'error', 'flatten', 'floor',
+ 'format-time', 'from-json', 'get-key', 'hash-int', 'index-of',
+ 'is-array', 'is-boolean', 'is-decimal', 'is-integer',
+ 'is-number', 'is-object', 'is-string', 'join', 'lowercase',
+ 'max', 'min', 'mod', 'not', 'now', 'number', 'parse-time',
+ 'parse-url', 'random', 'replace', 'round', 'sha256-hex', 'size',
+ 'split', 'starts-with', 'string', 'sum', 'test', 'to-json',
+ 'trim', 'uppercase', 'zip', 'zip-with-index', 'fallback'), suffix=_WORD_END),
+ Name.Builtin),
+ (r'[A-Z_a-z][0-9A-Z_a-z-]*:[A-Z_a-z][0-9A-Z_a-z-]*', Name.Function),
+ (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name),
+ (r'\$[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable),
+ ],
+ 'constant': [
+ (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable, 'root'),
+ ],
+ 'function': [
+ (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Function, combined('function-parameter-list', 'whitespace')),
+ ],
+ 'function-parameter-list': [
+ (r'\(', Punctuation, combined('function-parameters', 'whitespace')),
+ ],
+ 'function-parameters': [
+ (r',', Punctuation),
+ (r'\)', Punctuation, 'root'),
+ (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Variable),
+ ],
+ 'import-path': [
+ (r'"([^"]|\\.)*"', String.Symbol, 'root'),
+ ],
+ 'import-alias': [
+ (r'[A-Z_a-z][0-9A-Z_a-z-]*', Name.Namespace, 'root'),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Escape),
+ ],
+ 'whitespace': [
+ (r'[\t\n\f\r ]+', Whitespace),
+ (r'//.*(\n|\Z)', Comment.Single),
+ ]
+ }
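The JSLTLexer re-added above is a plain RegexLexer whose behaviour is fully described by its tokens table. A small sketch of feeding it a JSLT expression and checking that built-in function names come back as Name.Builtin (the sample program is illustrative only):

from pygments.lexers.jslt import JSLTLexer
from pygments.token import Name

source = 'import "transforms/common.jslt" as common\n{"count": size(.items)}\n'

for _, token_type, value in JSLTLexer().get_tokens_unprocessed(source):
    if token_type in Name.Builtin:
        # 'size' is matched through the builtin word list defined above.
        print(token_type, repr(value))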
diff --git a/contrib/python/Pygments/py3/pygments/lexers/julia.py b/contrib/python/Pygments/py3/pygments/lexers/julia.py
index 390d5d7158..0a4cc5aebe 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/julia.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/julia.py
@@ -4,7 +4,7 @@
Lexers for the Julia language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,17 +14,17 @@ from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
words, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
-from pygments.util import shebang_matches
-from pygments.lexers._julia_builtins import OPERATORS_LIST, DOTTED_OPERATORS_LIST, \
- KEYWORD_LIST, BUILTIN_LIST, LITERAL_LIST
+from pygments.util import shebang_matches
+from pygments.lexers._julia_builtins import OPERATORS_LIST, DOTTED_OPERATORS_LIST, \
+ KEYWORD_LIST, BUILTIN_LIST, LITERAL_LIST
__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
-# see https://docs.julialang.org/en/v1/manual/variables/#Allowed-Variable-Names
-allowed_variable = \
- '(?:[a-zA-Z_\u00A1-\U0010ffff][a-zA-Z_0-9!\u00A1-\U0010ffff]*)'
-# see https://github.com/JuliaLang/julia/blob/master/src/flisp/julia_opsuffs.h
-operator_suffixes = r'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*'
+# see https://docs.julialang.org/en/v1/manual/variables/#Allowed-Variable-Names
+allowed_variable = \
+ '(?:[a-zA-Z_\u00A1-\U0010ffff][a-zA-Z_0-9!\u00A1-\U0010ffff]*)'
+# see https://github.com/JuliaLang/julia/blob/master/src/flisp/julia_opsuffs.h
+operator_suffixes = r'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*'
class JuliaLexer(RegexLexer):
"""
@@ -46,36 +46,36 @@ class JuliaLexer(RegexLexer):
(r'[^\S\n]+', Text),
(r'#=', Comment.Multiline, "blockcomment"),
(r'#.*$', Comment),
- (r'[\[\](),;]', Punctuation),
-
- # symbols
- # intercept range expressions first
- (r'(' + allowed_variable + r')(\s*)(:)(' + allowed_variable + ')',
- bygroups(Name, Text, Operator, Name)),
- # then match :name which does not follow closing brackets, digits, or the
- # ::, <:, and :> operators
- (r'(?<![\]):<>\d.])(:' + allowed_variable + ')', String.Symbol),
-
- # type assertions - excludes expressions like ::typeof(sin) and ::avec[1]
- (r'(?<=::)(\s*)(' + allowed_variable + r')\b(?![(\[])', bygroups(Text, Keyword.Type)),
- # type comparisons
- # - MyType <: A or MyType >: A
- ('(' + allowed_variable + r')(\s*)([<>]:)(\s*)(' + allowed_variable + r')\b(?![(\[])',
- bygroups(Keyword.Type, Text, Operator, Text, Keyword.Type)),
- # - <: B or >: B
- (r'([<>]:)(\s*)(' + allowed_variable + r')\b(?![(\[])',
- bygroups(Operator, Text, Keyword.Type)),
- # - A <: or A >:
- (r'\b(' + allowed_variable + r')(\s*)([<>]:)',
- bygroups(Keyword.Type, Text, Operator)),
-
- # operators
- # Suffixes aren't actually allowed on all operators, but we'll ignore that
- # since those cases are invalid Julia code.
- (words([*OPERATORS_LIST, *DOTTED_OPERATORS_LIST], suffix=operator_suffixes), Operator),
- (words(['.' + o for o in DOTTED_OPERATORS_LIST], suffix=operator_suffixes), Operator),
- (words(['...', '..']), Operator),
-
+ (r'[\[\](),;]', Punctuation),
+
+ # symbols
+ # intercept range expressions first
+ (r'(' + allowed_variable + r')(\s*)(:)(' + allowed_variable + ')',
+ bygroups(Name, Text, Operator, Name)),
+ # then match :name which does not follow closing brackets, digits, or the
+ # ::, <:, and :> operators
+ (r'(?<![\]):<>\d.])(:' + allowed_variable + ')', String.Symbol),
+
+ # type assertions - excludes expressions like ::typeof(sin) and ::avec[1]
+ (r'(?<=::)(\s*)(' + allowed_variable + r')\b(?![(\[])', bygroups(Text, Keyword.Type)),
+ # type comparisons
+ # - MyType <: A or MyType >: A
+ ('(' + allowed_variable + r')(\s*)([<>]:)(\s*)(' + allowed_variable + r')\b(?![(\[])',
+ bygroups(Keyword.Type, Text, Operator, Text, Keyword.Type)),
+ # - <: B or >: B
+ (r'([<>]:)(\s*)(' + allowed_variable + r')\b(?![(\[])',
+ bygroups(Operator, Text, Keyword.Type)),
+ # - A <: or A >:
+ (r'\b(' + allowed_variable + r')(\s*)([<>]:)',
+ bygroups(Keyword.Type, Text, Operator)),
+
+ # operators
+ # Suffixes aren't actually allowed on all operators, but we'll ignore that
+ # since those cases are invalid Julia code.
+ (words([*OPERATORS_LIST, *DOTTED_OPERATORS_LIST], suffix=operator_suffixes), Operator),
+ (words(['.' + o for o in DOTTED_OPERATORS_LIST], suffix=operator_suffixes), Operator),
+ (words(['...', '..']), Operator),
+
# NOTE
# Patterns below work only for definition sites and thus hardly reliable.
#
@@ -88,61 +88,61 @@ class JuliaLexer(RegexLexer):
r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
# try to match trailing transpose
- (r'(?<=[.\w)\]])(\'' + operator_suffixes + ')+', Operator),
+ (r'(?<=[.\w)\]])(\'' + operator_suffixes + ')+', Operator),
- # raw strings
- (r'(raw)(""")', bygroups(String.Affix, String), 'tqrawstring'),
- (r'(raw)(")', bygroups(String.Affix, String), 'rawstring'),
+ # raw strings
+ (r'(raw)(""")', bygroups(String.Affix, String), 'tqrawstring'),
+ (r'(raw)(")', bygroups(String.Affix, String), 'rawstring'),
# regular expressions
- (r'(r)(""")', bygroups(String.Affix, String.Regex), 'tqregex'),
- (r'(r)(")', bygroups(String.Affix, String.Regex), 'regex'),
- # other strings
- (r'(' + allowed_variable + ')?(""")', bygroups(String.Affix, String), 'tqstring'),
- (r'(' + allowed_variable + ')?(")', bygroups(String.Affix, String), 'string'),
+ (r'(r)(""")', bygroups(String.Affix, String.Regex), 'tqregex'),
+ (r'(r)(")', bygroups(String.Affix, String.Regex), 'regex'),
+ # other strings
+ (r'(' + allowed_variable + ')?(""")', bygroups(String.Affix, String), 'tqstring'),
+ (r'(' + allowed_variable + ')?(")', bygroups(String.Affix, String), 'string'),
# backticks
- (r'(' + allowed_variable + ')?(```)', bygroups(String.Affix, String.Backtick), 'tqcommand'),
- (r'(' + allowed_variable + ')?(`)', bygroups(String.Affix, String.Backtick), 'command'),
-
- # type names
- # - names that begin a curly expression
- ('(' + allowed_variable + r')(\{)',
- bygroups(Keyword.Type, Punctuation), 'curly'),
- # - names as part of bare 'where'
- (r'(where)(\s+)(' + allowed_variable + ')',
- bygroups(Keyword, Text, Keyword.Type)),
- # - curly expressions in general
- (r'(\{)', Punctuation, 'curly'),
- # - names as part of type declaration
- (r'(abstract[ \t]+type|primitive[ \t]+type|mutable[ \t]+struct|struct)([\s()]+)(' +
- allowed_variable + r')', bygroups(Keyword, Text, Keyword.Type)),
-
- # macros
- (r'@' + allowed_variable, Name.Decorator),
- (words([*OPERATORS_LIST, '..', '.', *DOTTED_OPERATORS_LIST],
- prefix='@', suffix=operator_suffixes), Name.Decorator),
-
- # keywords
- (words(KEYWORD_LIST, suffix=r'\b'), Keyword),
- # builtin types
- (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type),
- # builtin literals
- (words(LITERAL_LIST, suffix=r'\b'), Name.Builtin),
-
+ (r'(' + allowed_variable + ')?(```)', bygroups(String.Affix, String.Backtick), 'tqcommand'),
+ (r'(' + allowed_variable + ')?(`)', bygroups(String.Affix, String.Backtick), 'command'),
+
+ # type names
+ # - names that begin a curly expression
+ ('(' + allowed_variable + r')(\{)',
+ bygroups(Keyword.Type, Punctuation), 'curly'),
+ # - names as part of bare 'where'
+ (r'(where)(\s+)(' + allowed_variable + ')',
+ bygroups(Keyword, Text, Keyword.Type)),
+ # - curly expressions in general
+ (r'(\{)', Punctuation, 'curly'),
+ # - names as part of type declaration
+ (r'(abstract[ \t]+type|primitive[ \t]+type|mutable[ \t]+struct|struct)([\s()]+)(' +
+ allowed_variable + r')', bygroups(Keyword, Text, Keyword.Type)),
+
+ # macros
+ (r'@' + allowed_variable, Name.Decorator),
+ (words([*OPERATORS_LIST, '..', '.', *DOTTED_OPERATORS_LIST],
+ prefix='@', suffix=operator_suffixes), Name.Decorator),
+
+ # keywords
+ (words(KEYWORD_LIST, suffix=r'\b'), Keyword),
+ # builtin types
+ (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type),
+ # builtin literals
+ (words(LITERAL_LIST, suffix=r'\b'), Name.Builtin),
+
# names
(allowed_variable, Name),
# numbers
- (r'(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+((_\d+)+)?[eEf][+-]?[0-9]+', Number.Float),
- (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+', Number.Float),
- (r'0b[01]+((_[01]+)+)?', Number.Bin),
- (r'0o[0-7]+((_[0-7]+)+)?', Number.Oct),
- (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?', Number.Hex),
- (r'\d+((_\d+)+)?', Number.Integer),
-
- # single dot operator matched last to permit e.g. ".1" as a float
- (words(['.']), Operator),
+ (r'(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+((_\d+)+)?[eEf][+-]?[0-9]+', Number.Float),
+ (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+', Number.Float),
+ (r'0b[01]+((_[01]+)+)?', Number.Bin),
+ (r'0o[0-7]+((_[0-7]+)+)?', Number.Oct),
+ (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?', Number.Hex),
+ (r'\d+((_\d+)+)?', Number.Integer),
+
+ # single dot operator matched last to permit e.g. ".1" as a float
+ (words(['.']), Operator),
],
"blockcomment": [
@@ -152,80 +152,80 @@ class JuliaLexer(RegexLexer):
(r'[=#]', Comment.Multiline),
],
- 'curly': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (allowed_variable, Keyword.Type),
- include('root'),
- ],
-
- 'tqrawstring': [
- (r'"""', String, '#pop'),
- (r'([^"]|"[^"][^"])+', String),
- ],
- 'rawstring': [
- (r'"', String, '#pop'),
- (r'\\"', String.Escape),
- (r'([^"\\]|\\[^"])+', String),
- ],
-
- # Interpolation is defined as "$" followed by the shortest full expression, which is
- # something we can't parse.
- # Include the most common cases here: $word, and $(paren'd expr).
- 'interp': [
- (r'\$' + allowed_variable, String.Interpol),
- (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
- ],
- 'in-intp': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
-
+ 'curly': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (allowed_variable, Keyword.Type),
+ include('root'),
+ ],
+
+ 'tqrawstring': [
+ (r'"""', String, '#pop'),
+ (r'([^"]|"[^"][^"])+', String),
+ ],
+ 'rawstring': [
+ (r'"', String, '#pop'),
+ (r'\\"', String.Escape),
+ (r'([^"\\]|\\[^"])+', String),
+ ],
+
+ # Interpolation is defined as "$" followed by the shortest full expression, which is
+ # something we can't parse.
+ # Include the most common cases here: $word, and $(paren'd expr).
+ 'interp': [
+ (r'\$' + allowed_variable, String.Interpol),
+ (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
+ ],
+ 'in-intp': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+
'string': [
- (r'(")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
+ (r'(")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
# FIXME: This escape pattern is not perfect.
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
- include('interp'),
+ include('interp'),
# @printf and @sprintf formats
(r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]',
String.Interpol),
- (r'[^"$%\\]+', String),
- (r'.', String),
+ (r'[^"$%\\]+', String),
+ (r'.', String),
],
'tqstring': [
- (r'(""")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
+ (r'(""")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
- include('interp'),
- (r'[^"$%\\]+', String),
- (r'.', String),
+ include('interp'),
+ (r'[^"$%\\]+', String),
+ (r'.', String),
],
'regex': [
- (r'(")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
+ (r'(")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
(r'\\"', String.Regex),
- (r'[^\\"]+', String.Regex),
+ (r'[^\\"]+', String.Regex),
],
'tqregex': [
- (r'(""")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
- (r'[^"]+', String.Regex),
+ (r'(""")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
+ (r'[^"]+', String.Regex),
],
'command': [
- (r'(`)(' + allowed_variable + r'|\d+)?', bygroups(String.Backtick, String.Affix), '#pop'),
- (r'\\[`$]', String.Escape),
- include('interp'),
- (r'[^\\`$]+', String.Backtick),
- (r'.', String.Backtick),
- ],
- 'tqcommand': [
- (r'(```)(' + allowed_variable + r'|\d+)?', bygroups(String.Backtick, String.Affix), '#pop'),
- (r'\\\$', String.Escape),
- include('interp'),
- (r'[^\\`$]+', String.Backtick),
- (r'.', String.Backtick),
+ (r'(`)(' + allowed_variable + r'|\d+)?', bygroups(String.Backtick, String.Affix), '#pop'),
+ (r'\\[`$]', String.Escape),
+ include('interp'),
+ (r'[^\\`$]+', String.Backtick),
+ (r'.', String.Backtick),
],
+ 'tqcommand': [
+ (r'(```)(' + allowed_variable + r'|\d+)?', bygroups(String.Backtick, String.Affix), '#pop'),
+ (r'\\\$', String.Escape),
+ include('interp'),
+ (r'[^\\`$]+', String.Backtick),
+ (r'.', String.Backtick),
+ ],
}
def analyse_text(text):
@@ -239,7 +239,7 @@ class JuliaConsoleLexer(Lexer):
.. versionadded:: 1.6
"""
name = 'Julia console'
- aliases = ['jlcon', 'julia-repl']
+ aliases = ['jlcon', 'julia-repl']
def get_tokens_unprocessed(self, text):
jllexer = JuliaLexer(**self.options)
@@ -265,8 +265,8 @@ class JuliaConsoleLexer(Lexer):
curcode += line[6:]
else:
if curcode:
- yield from do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('ERROR: ') or error:
@@ -278,5 +278,5 @@ class JuliaConsoleLexer(Lexer):
start += len(line)
if curcode:
- yield from do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode))
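Both console lexers in this file rely on do_insertions to splice prompt tokens into the stream produced by the underlying JuliaLexer. A simplified illustration of that mechanism with a hand-built insertion list (not code from the patch):

from pygments.lexer import do_insertions
from pygments.lexers.julia import JuliaLexer
from pygments.token import Generic

code = "x = 1 + 2\n"
# One insertion: a prompt token placed at offset 0 of the code buffer,
# mirroring what JuliaConsoleLexer builds while scanning a transcript.
insertions = [(0, [(0, Generic.Prompt, "julia> ")])]

for index, token, value in do_insertions(
        insertions, JuliaLexer().get_tokens_unprocessed(code)):
    print(index, token, repr(value))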
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jvm.py b/contrib/python/Pygments/py3/pygments/lexers/jvm.py
index 4ffc5c7fdf..ca949faf91 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jvm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jvm.py
@@ -4,7 +4,7 @@
Pygments lexers for JVM languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ __all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
class JavaLexer(RegexLexer):
"""
- For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
+ For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
"""
name = 'Java'
@@ -51,7 +51,7 @@ class JavaLexer(RegexLexer):
(r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
r'((?:[^\W\d]|\$)[\w$]*)' # method name
r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Punctuation)),
+ bygroups(using(this), Name.Function, Text, Punctuation)),
(r'@[^\W\d][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|sealed|static|strictfp|super|synchronized|throws|'
@@ -61,17 +61,17 @@ class JavaLexer(RegexLexer):
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)\b', Keyword.Declaration, 'class'),
- (r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
- 'var'),
+ (r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'var'),
(r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
'import'),
- (r'"', String, 'string'),
+ (r'"', String, 'string'),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
- Name.Attribute)),
- (r'^(\s*)(default)(:)', bygroups(Text, Keyword, Punctuation)),
- (r'^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)', bygroups(Text, Name.Label,
- Punctuation)),
+ (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
+ Name.Attribute)),
+ (r'^(\s*)(default)(:)', bygroups(Text, Keyword, Punctuation)),
+ (r'^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)', bygroups(Text, Name.Label,
+ Punctuation)),
(r'([^\W\d]|\$)[\w$]*', Name),
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
r'\.[0-9][0-9_]*)'
@@ -85,27 +85,27 @@ class JavaLexer(RegexLexer):
(r'0[bB][01][01_]*[lL]?', Number.Bin),
(r'0[0-7_]+[lL]?', Number.Oct),
(r'0|[1-9][0-9_]*[lL]?', Number.Integer),
- (r'[~^*!%&\[\]<>|+=/?-]', Operator),
- (r'[{}();:.,]', Punctuation),
+ (r'[~^*!%&\[\]<>|+=/?-]', Operator),
+ (r'[{}();:.,]', Punctuation),
(r'\n', Text)
],
'class': [
(r'\s+', Text),
(r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
],
- 'var': [
- (r'([^\W\d]|\$)[\w$]*', Name, '#pop')
- ],
+ 'var': [
+ (r'([^\W\d]|\$)[\w$]*', Name, '#pop')
+ ],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
- 'string': [
- (r'[^\\"]+', String),
- (r'\\\\', String), # Escaped backslash
- (r'\\"', String), # Escaped quote
- (r'\\', String), # Bare backslash
- (r'"', String, '#pop'), # Closing quote
- ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r'\\\\', String), # Escaped backslash
+ (r'\\"', String), # Escaped quote
+ (r'\\', String), # Bare backslash
+ (r'"', String, '#pop'), # Closing quote
+ ],
}
@@ -121,7 +121,7 @@ class AspectJLexer(JavaLexer):
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
- aj_keywords = {
+ aj_keywords = {
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
@@ -131,9 +131,9 @@ class AspectJLexer(JavaLexer):
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
- }
- aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
- aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
+ }
+ aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
+ aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
@@ -160,278 +160,278 @@ class ScalaLexer(RegexLexer):
flags = re.MULTILINE | re.DOTALL
- opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
- letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
- upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
- letterOrDigit = '(?:%s|[0-9])' % letter
- letterOrDigitNoDollarSign = '(?:%s|[0-9])' % letter.replace('\\$', '')
- alphaId = '%s+' % letter
- simpleInterpolatedVariable = '%s%s*' % (letter, letterOrDigitNoDollarSign)
- idrest = '%s%s*(?:(?<=_)%s+)?' % (letter, letterOrDigit, opchar)
- idUpper = '%s%s*(?:(?<=_)%s+)?' % (upperLetter, letterOrDigit, opchar)
- plainid = '(?:%s|%s+)' % (idrest, opchar)
- backQuotedId = r'`[^`]+`'
- anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
- notStartOfComment = r'(?!//|/\*)'
- endOfLineMaybeWithComment = r'(?=\s*(//|$))'
-
- keywords = (
- 'new', 'return', 'throw', 'classOf', 'isInstanceOf', 'asInstanceOf',
- 'else', 'if', 'then', 'do', 'while', 'for', 'yield', 'match', 'case',
- 'catch', 'finally', 'try'
- )
-
- operators = (
- '<%', '=:=', '<:<', '<%<', '>:', '<:', '=', '==', '!=', '<=', '>=',
- '<>', '<', '>', '<-', '←', '->', '→', '=>', '⇒', '?', '@', '|', '-',
- '+', '*', '%', '~', '\\'
- )
-
- storage_modifiers = (
- 'private', 'protected', 'synchronized', '@volatile', 'abstract',
- 'final', 'lazy', 'sealed', 'implicit', 'override', '@transient',
- '@native'
- )
+ opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
+ letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
+ upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
+ letterOrDigit = '(?:%s|[0-9])' % letter
+ letterOrDigitNoDollarSign = '(?:%s|[0-9])' % letter.replace('\\$', '')
+ alphaId = '%s+' % letter
+ simpleInterpolatedVariable = '%s%s*' % (letter, letterOrDigitNoDollarSign)
+ idrest = '%s%s*(?:(?<=_)%s+)?' % (letter, letterOrDigit, opchar)
+ idUpper = '%s%s*(?:(?<=_)%s+)?' % (upperLetter, letterOrDigit, opchar)
+ plainid = '(?:%s|%s+)' % (idrest, opchar)
+ backQuotedId = r'`[^`]+`'
+ anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
+ notStartOfComment = r'(?!//|/\*)'
+ endOfLineMaybeWithComment = r'(?=\s*(//|$))'
+
+ keywords = (
+ 'new', 'return', 'throw', 'classOf', 'isInstanceOf', 'asInstanceOf',
+ 'else', 'if', 'then', 'do', 'while', 'for', 'yield', 'match', 'case',
+ 'catch', 'finally', 'try'
+ )
+
+ operators = (
+ '<%', '=:=', '<:<', '<%<', '>:', '<:', '=', '==', '!=', '<=', '>=',
+ '<>', '<', '>', '<-', '←', '->', '→', '=>', '⇒', '?', '@', '|', '-',
+ '+', '*', '%', '~', '\\'
+ )
+
+ storage_modifiers = (
+ 'private', 'protected', 'synchronized', '@volatile', 'abstract',
+ 'final', 'lazy', 'sealed', 'implicit', 'override', '@transient',
+ '@native'
+ )
tokens = {
'root': [
- include('whitespace'),
- include('comments'),
- include('script-header'),
- include('imports'),
- include('exports'),
- include('storage-modifiers'),
- include('annotations'),
- include('using'),
- include('declarations'),
- include('inheritance'),
- include('extension'),
- include('end'),
- include('constants'),
- include('strings'),
- include('symbols'),
- include('singleton-type'),
- include('inline'),
- include('quoted'),
- include('keywords'),
- include('operators'),
- include('punctuation'),
- include('names'),
- ],
-
- # Includes:
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'comments': [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
- 'script-header': [
- (r'^#!([^\n]*)$', Comment.Hashbang),
- ],
- 'imports': [
- (r'\b(import)(\s+)', bygroups(Keyword, Text), 'import-path'),
- ],
- 'exports': [
- (r'\b(export)(\s+)(given)(\s+)',
- bygroups(Keyword, Text, Keyword, Text), 'export-path'),
- (r'\b(export)(\s+)', bygroups(Keyword, Text), 'export-path'),
- ],
- 'storage-modifiers': [
- (words(storage_modifiers, prefix=r'\b', suffix=r'\b'), Keyword),
- # Only highlight soft modifiers if they are eventually followed by
- # the correct keyword. Note that soft modifiers can be followed by a
- # sequence of regular modifiers; [a-z\s]* skips those, and we just
- # check that the soft modifier is applied to a supported statement.
- (r'\b(transparent|opaque|infix|open|inline)\b(?=[a-z\s]*\b'
- r'(def|val|var|given|type|class|trait|object|enum)\b)', Keyword),
- ],
- 'annotations': [
- (r'@%s' % idrest, Name.Decorator),
- ],
- 'using': [
- # using is a soft keyword, can only be used in the first position of
- # a parameter or argument list.
- (r'(\()(\s*)(using)(\s)', bygroups(Punctuation, Text, Keyword, Text)),
- ],
- 'declarations': [
- (r'\b(def)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
- bygroups(Keyword, Text, Name.Function)),
- (r'\b(trait)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
- bygroups(Keyword, Text, Name.Class)),
- (r'\b(?:(case)(\s+))?(class|object|enum)\b(\s*)%s(%s)?' %
- (notStartOfComment, anyId),
- bygroups(Keyword, Text, Keyword, Text, Name.Class)),
- (r'(?<!\.)\b(type)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
- bygroups(Keyword, Text, Name.Class)),
- (r'\b(val|var)\b', Keyword.Declaration),
- (r'\b(package)(\s+)(object)\b(\s*)%s(%s)?' %
- (notStartOfComment, anyId),
- bygroups(Keyword, Text, Keyword, Text, Name.Namespace)),
- (r'\b(package)(\s+)', bygroups(Keyword, Text), 'package'),
- (r'\b(given)\b(\s*)(%s)' % idUpper,
- bygroups(Keyword, Text, Name.Class)),
- (r'\b(given)\b(\s*)(%s)?' % anyId,
- bygroups(Keyword, Text, Name)),
- ],
- 'inheritance': [
- (r'\b(extends|with|derives)\b(\s*)'
- r'(%s|%s|(?=\([^\)]+=>)|(?=%s)|(?="))?' %
- (idUpper, backQuotedId, plainid),
- bygroups(Keyword, Text, Name.Class)),
- ],
- 'extension': [
- (r'\b(extension)(\s+)(?=[\[\(])', bygroups(Keyword, Text)),
- ],
- 'end': [
- # end is a soft keyword, should only be highlighted in certain cases
- (r'\b(end)(\s+)(if|while|for|match|new|extension|val|var)\b',
- bygroups(Keyword, Text, Keyword)),
- (r'\b(end)(\s+)(%s)%s' % (idUpper, endOfLineMaybeWithComment),
- bygroups(Keyword, Text, Name.Class)),
- (r'\b(end)(\s+)(%s|%s)?%s' %
- (backQuotedId, plainid, endOfLineMaybeWithComment),
- bygroups(Keyword, Text, Name.Namespace)),
- ],
- 'punctuation': [
- (r'[{}()\[\];,.]', Punctuation),
- (r'(?<!:):(?!:)', Punctuation),
- ],
- 'keywords': [
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- ],
- 'operators': [
- (r'(%s{2,})(\s+)' % opchar, bygroups(Operator, Text)),
- (r'/(?![/*])', Operator),
- (words(operators), Operator),
- (r'(?<!%s)(!|&&|\|\|)(?!%s)' % (opchar, opchar), Operator),
- ],
- 'constants': [
- (r'\b(this|super)\b', Name.Builtin.Pseudo),
+ include('whitespace'),
+ include('comments'),
+ include('script-header'),
+ include('imports'),
+ include('exports'),
+ include('storage-modifiers'),
+ include('annotations'),
+ include('using'),
+ include('declarations'),
+ include('inheritance'),
+ include('extension'),
+ include('end'),
+ include('constants'),
+ include('strings'),
+ include('symbols'),
+ include('singleton-type'),
+ include('inline'),
+ include('quoted'),
+ include('keywords'),
+ include('operators'),
+ include('punctuation'),
+ include('names'),
+ ],
+
+ # Includes:
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'comments': [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ ],
+ 'script-header': [
+ (r'^#!([^\n]*)$', Comment.Hashbang),
+ ],
+ 'imports': [
+ (r'\b(import)(\s+)', bygroups(Keyword, Text), 'import-path'),
+ ],
+ 'exports': [
+ (r'\b(export)(\s+)(given)(\s+)',
+ bygroups(Keyword, Text, Keyword, Text), 'export-path'),
+ (r'\b(export)(\s+)', bygroups(Keyword, Text), 'export-path'),
+ ],
+ 'storage-modifiers': [
+ (words(storage_modifiers, prefix=r'\b', suffix=r'\b'), Keyword),
+ # Only highlight soft modifiers if they are eventually followed by
+ # the correct keyword. Note that soft modifiers can be followed by a
+ # sequence of regular modifiers; [a-z\s]* skips those, and we just
+ # check that the soft modifier is applied to a supported statement.
+ (r'\b(transparent|opaque|infix|open|inline)\b(?=[a-z\s]*\b'
+ r'(def|val|var|given|type|class|trait|object|enum)\b)', Keyword),
+ ],
+ 'annotations': [
+ (r'@%s' % idrest, Name.Decorator),
+ ],
+ 'using': [
+ # using is a soft keyword, can only be used in the first position of
+ # a parameter or argument list.
+ (r'(\()(\s*)(using)(\s)', bygroups(Punctuation, Text, Keyword, Text)),
+ ],
+ 'declarations': [
+ (r'\b(def)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Name.Function)),
+ (r'\b(trait)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(?:(case)(\s+))?(class|object|enum)\b(\s*)%s(%s)?' %
+ (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+ (r'(?<!\.)\b(type)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(val|var)\b', Keyword.Declaration),
+ (r'\b(package)(\s+)(object)\b(\s*)%s(%s)?' %
+ (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Keyword, Text, Name.Namespace)),
+ (r'\b(package)(\s+)', bygroups(Keyword, Text), 'package'),
+ (r'\b(given)\b(\s*)(%s)' % idUpper,
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(given)\b(\s*)(%s)?' % anyId,
+ bygroups(Keyword, Text, Name)),
+ ],
+ 'inheritance': [
+ (r'\b(extends|with|derives)\b(\s*)'
+ r'(%s|%s|(?=\([^\)]+=>)|(?=%s)|(?="))?' %
+ (idUpper, backQuotedId, plainid),
+ bygroups(Keyword, Text, Name.Class)),
+ ],
+ 'extension': [
+ (r'\b(extension)(\s+)(?=[\[\(])', bygroups(Keyword, Text)),
+ ],
+ 'end': [
+ # end is a soft keyword, should only be highlighted in certain cases
+ (r'\b(end)(\s+)(if|while|for|match|new|extension|val|var)\b',
+ bygroups(Keyword, Text, Keyword)),
+ (r'\b(end)(\s+)(%s)%s' % (idUpper, endOfLineMaybeWithComment),
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(end)(\s+)(%s|%s)?%s' %
+ (backQuotedId, plainid, endOfLineMaybeWithComment),
+ bygroups(Keyword, Text, Name.Namespace)),
+ ],
+ 'punctuation': [
+ (r'[{}()\[\];,.]', Punctuation),
+ (r'(?<!:):(?!:)', Punctuation),
+ ],
+ 'keywords': [
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ ],
+ 'operators': [
+ (r'(%s{2,})(\s+)' % opchar, bygroups(Operator, Text)),
+ (r'/(?![/*])', Operator),
+ (words(operators), Operator),
+ (r'(?<!%s)(!|&&|\|\|)(?!%s)' % (opchar, opchar), Operator),
+ ],
+ 'constants': [
+ (r'\b(this|super)\b', Name.Builtin.Pseudo),
(r'(true|false|null)\b', Keyword.Constant),
- (r'0[xX][0-9a-fA-F_]*', Number.Hex),
- (r'([0-9][0-9_]*\.[0-9][0-9_]*|\.[0-9][0-9_]*)'
- r'([eE][+-]?[0-9][0-9_]*)?[fFdD]?', Number.Float),
- (r'[0-9]+([eE][+-]?[0-9]+)?[fFdD]', Number.Float),
- (r'[0-9]+([eE][+-]?[0-9]+)[fFdD]?', Number.Float),
- (r'[0-9]+[lL]', Number.Integer.Long),
- (r'[0-9]+', Number.Integer),
+ (r'0[xX][0-9a-fA-F_]*', Number.Hex),
+ (r'([0-9][0-9_]*\.[0-9][0-9_]*|\.[0-9][0-9_]*)'
+ r'([eE][+-]?[0-9][0-9_]*)?[fFdD]?', Number.Float),
+ (r'[0-9]+([eE][+-]?[0-9]+)?[fFdD]', Number.Float),
+ (r'[0-9]+([eE][+-]?[0-9]+)[fFdD]?', Number.Float),
+ (r'[0-9]+[lL]', Number.Integer.Long),
+ (r'[0-9]+', Number.Integer),
(r'""".*?"""(?!")', String),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"(')(\\.)(')", bygroups(String.Char, String.Escape, String.Char)),
- (r"'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- ],
- "strings": [
- (r'[fs]"""', String, 'interpolated-string-triple'),
- (r'[fs]"', String, 'interpolated-string'),
- (r'raw"(\\\\|\\"|[^"])*"', String),
- ],
- 'symbols': [
- (r"('%s)(?!')" % plainid, String.Symbol),
- ],
- 'singleton-type': [
- (r'(\.)(type)\b', bygroups(Punctuation, Keyword)),
- ],
- 'inline': [
-            # inline is a soft modifier, only highlighted if followed by if,
- # match or parameters.
- (r'\b(inline)(?=\s+(%s|%s)\s*:)' % (plainid, backQuotedId),
- Keyword),
- (r'\b(inline)\b(?=(?:.(?!\b(?:val|def|given)\b))*\b(if|match)\b)',
- Keyword),
- ],
- 'quoted': [
- # '{...} or ${...}
- (r"['$]\{(?!')", Punctuation),
- # '[...]
- (r"'\[(?!')", Punctuation),
- ],
- 'names': [
- (idUpper, Name.Class),
- (anyId, Name),
- ],
-
- # States
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"(')(\\.)(')", bygroups(String.Char, String.Escape, String.Char)),
+ (r"'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ ],
+ "strings": [
+ (r'[fs]"""', String, 'interpolated-string-triple'),
+ (r'[fs]"', String, 'interpolated-string'),
+ (r'raw"(\\\\|\\"|[^"])*"', String),
+ ],
+ 'symbols': [
+ (r"('%s)(?!')" % plainid, String.Symbol),
+ ],
+ 'singleton-type': [
+ (r'(\.)(type)\b', bygroups(Punctuation, Keyword)),
+ ],
+ 'inline': [
+            # inline is a soft modifier, only highlighted if followed by if,
+ # match or parameters.
+ (r'\b(inline)(?=\s+(%s|%s)\s*:)' % (plainid, backQuotedId),
+ Keyword),
+ (r'\b(inline)\b(?=(?:.(?!\b(?:val|def|given)\b))*\b(if|match)\b)',
+ Keyword),
+ ],
+ 'quoted': [
+ # '{...} or ${...}
+ (r"['$]\{(?!')", Punctuation),
+ # '[...]
+ (r"'\[(?!')", Punctuation),
+ ],
+ 'names': [
+ (idUpper, Name.Class),
+ (anyId, Name),
+ ],
+
+ # States
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'import-path': [
- (r'(?<=[\n;:])', Text, '#pop'),
- include('comments'),
- (r'\b(given)\b', Keyword),
- include('qualified-name'),
- (r'\{', Punctuation, 'import-path-curly-brace'),
- ],
- 'import-path-curly-brace': [
- include('whitespace'),
- include('comments'),
- (r'\b(given)\b', Keyword),
- (r'=>', Operator),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'[\[\]]', Punctuation),
- include('qualified-name'),
- ],
- 'export-path': [
- (r'(?<=[\n;:])', Text, '#pop'),
- include('comments'),
- include('qualified-name'),
- (r'\{', Punctuation, 'export-path-curly-brace'),
- ],
- 'export-path-curly-brace': [
- include('whitespace'),
- include('comments'),
- (r'=>', Operator),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation),
- include('qualified-name'),
- ],
- 'package': [
- (r'(?<=[\n;])', Text, '#pop'),
- (r':', Punctuation, '#pop'),
- include('comments'),
- include('qualified-name'),
- ],
- 'interpolated-string-triple': [
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'import-path': [
+ (r'(?<=[\n;:])', Text, '#pop'),
+ include('comments'),
+ (r'\b(given)\b', Keyword),
+ include('qualified-name'),
+ (r'\{', Punctuation, 'import-path-curly-brace'),
+ ],
+ 'import-path-curly-brace': [
+ include('whitespace'),
+ include('comments'),
+ (r'\b(given)\b', Keyword),
+ (r'=>', Operator),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'[\[\]]', Punctuation),
+ include('qualified-name'),
+ ],
+ 'export-path': [
+ (r'(?<=[\n;:])', Text, '#pop'),
+ include('comments'),
+ include('qualified-name'),
+ (r'\{', Punctuation, 'export-path-curly-brace'),
+ ],
+ 'export-path-curly-brace': [
+ include('whitespace'),
+ include('comments'),
+ (r'=>', Operator),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation),
+ include('qualified-name'),
+ ],
+ 'package': [
+ (r'(?<=[\n;])', Text, '#pop'),
+ (r':', Punctuation, '#pop'),
+ include('comments'),
+ include('qualified-name'),
+ ],
+ 'interpolated-string-triple': [
(r'"""(?!")', String, '#pop'),
(r'"', String),
- include('interpolated-string-common'),
+ include('interpolated-string-common'),
],
- 'interpolated-string': [
+ 'interpolated-string': [
(r'"', String, '#pop'),
- include('interpolated-string-common'),
+ include('interpolated-string-common'),
],
- 'interpolated-string-brace': [
+ 'interpolated-string-brace': [
(r'\}', String.Interpol, '#pop'),
- (r'\{', Punctuation, 'interpolated-string-nested-brace'),
+ (r'\{', Punctuation, 'interpolated-string-nested-brace'),
include('root'),
],
- 'interpolated-string-nested-brace': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
-
- # Helpers
- 'qualified-name': [
- (idUpper, Name.Class),
- (r'(%s)(\.)' % anyId, bygroups(Name.Namespace, Punctuation)),
- (r'\.', Punctuation),
- (anyId, Name),
- (r'[^\S\n]+', Text),
- ],
- 'interpolated-string-common': [
- (r'[^"$\\]+', String),
- (r'\$\$', String.Escape),
- (r'(\$)(%s)' % simpleInterpolatedVariable,
- bygroups(String.Interpol, Name)),
- (r'\$\{', String.Interpol, 'interpolated-string-brace'),
- (r'\\.', String),
- ],
+ 'interpolated-string-nested-brace': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+
+ # Helpers
+ 'qualified-name': [
+ (idUpper, Name.Class),
+ (r'(%s)(\.)' % anyId, bygroups(Name.Namespace, Punctuation)),
+ (r'\.', Punctuation),
+ (anyId, Name),
+ (r'[^\S\n]+', Text),
+ ],
+ 'interpolated-string-common': [
+ (r'[^"$\\]+', String),
+ (r'\$\$', String.Escape),
+ (r'(\$)(%s)' % simpleInterpolatedVariable,
+ bygroups(String.Interpol, Name)),
+ (r'\$\{', String.Interpol, 'interpolated-string-brace'),
+ (r'\\.', String),
+ ],
}
@@ -529,7 +529,7 @@ class GosuTemplateLexer(Lexer):
def get_tokens_unprocessed(self, text):
lexer = GosuLexer()
stack = ['templateText']
- yield from lexer.get_tokens_unprocessed(text, stack)
+ yield from lexer.get_tokens_unprocessed(text, stack)
class GroovyLexer(RegexLexer):
@@ -556,21 +556,21 @@ class GroovyLexer(RegexLexer):
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- # keywords: go before method names to avoid lexing "throw new XYZ"
- # as a method signature
+ # keywords: go before method names to avoid lexing "throw new XYZ"
+ # as a method signature
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
Keyword),
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'('
- r'[a-zA-Z_]\w*' # method name
- r'|"(?:\\\\|\\[^\\]|[^"\\])*"' # or double-quoted method name
- r"|'(?:\\\\|\\[^\\]|[^'\\])*'" # or single-quoted method name
- r')'
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'('
+ r'[a-zA-Z_]\w*' # method name
+ r'|"(?:\\\\|\\[^\\]|[^"\\])*"' # or double-quoted method name
+ r"|'(?:\\\\|\\[^\\]|[^'\\])*'" # or single-quoted method name
+ r')'
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
@@ -583,10 +583,10 @@ class GroovyLexer(RegexLexer):
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'""".*?"""', String.Double),
(r"'''.*?'''", String.Single),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'\$/((?!/\$).)*/\$', String),
- (r'/(\\\\|\\[^\\]|[^/\\])*/', String),
+ (r'/(\\\\|\\[^\\]|[^/\\])*/', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
@@ -679,7 +679,7 @@ class IokeLexer(RegexLexer):
# Symbols
(r':[\w!:?]+', String.Symbol),
(r'[\w!:?]+:(?![\w!?])', String.Other),
- (r':"(\\\\|\\[^\\]|[^"\\])*"', String.Symbol),
+ (r':"(\\\\|\\[^\\]|[^"\\])*"', String.Symbol),
# Documentation
(r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
@@ -766,9 +766,9 @@ class IokeLexer(RegexLexer):
r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
# functions
- ('(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
- 'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
- '(?![\\w!:?])', Name.Function),
+ ('(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
+ 'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
+ '(?![\\w!:?])', Name.Function),
# Numbers
(r'-?0[xX][0-9a-fA-F]+', Number.Hex),
@@ -784,7 +784,7 @@ class IokeLexer(RegexLexer):
r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
- r'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
+ r'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
(r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
Operator),
@@ -897,7 +897,7 @@ class ClojureLexer(RegexLexer):
(r'0x-?[abcdef\d]+', Number.Hex),
# strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'" + valid_name, String.Symbol),
(r"\\(.|[a-z]+)", String.Char),
@@ -978,8 +978,8 @@ class TeaLangLexer(RegexLexer):
(r'(true|false|null)\b', Keyword.Constant),
(r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_\$]\w*', Name),
@@ -1040,7 +1040,7 @@ class CeylonLexer(RegexLexer):
(r'(class|interface|object|alias)(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
(r'(\.)([a-z_]\w*)',
bygroups(Operator, Name.Attribute)),
@@ -1084,10 +1084,10 @@ class KotlinLexer(RegexLexer):
.. versionadded:: 1.5
"""
-
+
name = 'Kotlin'
aliases = ['kotlin']
- filenames = ['*.kt', '*.kts']
+ filenames = ['*.kt', '*.kts']
mimetypes = ['text/x-kotlin']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
@@ -1095,140 +1095,140 @@ class KotlinLexer(RegexLexer):
kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc') + ']*')
-
+
kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
- 'Mn', 'Mc', 'Zs')
- + r'\'~!%^&*()+=|\[\]:;,.<>/\?-]*')
+ 'Mn', 'Mc', 'Zs')
+ + r'\'~!%^&*()+=|\[\]:;,.<>/\?-]*')
kt_id = '(' + kt_name + '|`' + kt_space_name + '`)'
- modifiers = (r'actual|abstract|annotation|companion|const|crossinline|'
- r'data|enum|expect|external|final|infix|inline|inner|'
- r'internal|lateinit|noinline|open|operator|override|private|'
- r'protected|public|sealed|suspend|tailrec')
-
+ modifiers = (r'actual|abstract|annotation|companion|const|crossinline|'
+ r'data|enum|expect|external|final|infix|inline|inner|'
+ r'internal|lateinit|noinline|open|operator|override|private|'
+ r'protected|public|sealed|suspend|tailrec')
+
tokens = {
'root': [
- # Whitespaces
+ # Whitespaces
(r'[^\S\n]+', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
- (r'\n', Text),
- # Comments
+ (r'\n', Text),
+ # Comments
(r'//.*?\n', Comment.Single),
- (r'^#!/.+?\n', Comment.Single), # shebang for kotlin scripts
+ (r'^#!/.+?\n', Comment.Single), # shebang for kotlin scripts
(r'/[*].*?[*]/', Comment.Multiline),
- # Keywords
- (r'as\?', Keyword),
- (r'(as|break|by|catch|constructor|continue|do|dynamic|else|finally|'
- r'get|for|if|init|[!]*in|[!]*is|out|reified|return|set|super|this|'
- r'throw|try|typealias|typeof|vararg|when|where|while)\b', Keyword),
- (r'it\b', Name.Builtin),
- # Built-in types
- (words(('Boolean?', 'Byte?', 'Char?', 'Double?', 'Float?',
- 'Int?', 'Long?', 'Short?', 'String?', 'Any?', 'Unit?')), Keyword.Type),
- (words(('Boolean', 'Byte', 'Char', 'Double', 'Float',
- 'Int', 'Long', 'Short', 'String', 'Any', 'Unit'), suffix=r'\b'), Keyword.Type),
- # Constants
- (r'(true|false|null)\b', Keyword.Constant),
- # Imports
- (r'(package|import)(\s+)(\S+)', bygroups(Keyword, Text, Name.Namespace)),
- # Dot access
- (r'(\?\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
- (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation, Name.Attribute)),
- # Annotations
- (r'@[^\W\d][\w.]*', Name.Decorator),
- # Labels
- (r'[^\W\d][\w.]+@', Name.Decorator),
- # Object expression
- (r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Text, Punctuation, Text), 'class'),
- # Types
- (r'((?:(?:' + modifiers + r'|fun)\s+)*)(class|interface|object)(\s+)',
- bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'class'),
- # Variables
- (r'(var|val)(\s+)(\()', bygroups(Keyword.Declaration, Text, Punctuation),
- 'destructuring_assignment'),
- (r'((?:(?:' + modifiers + r')\s+)*)(var|val)(\s+)',
- bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'variable'),
- # Functions
- (r'((?:(?:' + modifiers + r')\s+)*)(fun)(\s+)',
- bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'function'),
- # Operators
+ # Keywords
+ (r'as\?', Keyword),
+ (r'(as|break|by|catch|constructor|continue|do|dynamic|else|finally|'
+ r'get|for|if|init|[!]*in|[!]*is|out|reified|return|set|super|this|'
+ r'throw|try|typealias|typeof|vararg|when|where|while)\b', Keyword),
+ (r'it\b', Name.Builtin),
+ # Built-in types
+ (words(('Boolean?', 'Byte?', 'Char?', 'Double?', 'Float?',
+ 'Int?', 'Long?', 'Short?', 'String?', 'Any?', 'Unit?')), Keyword.Type),
+ (words(('Boolean', 'Byte', 'Char', 'Double', 'Float',
+ 'Int', 'Long', 'Short', 'String', 'Any', 'Unit'), suffix=r'\b'), Keyword.Type),
+ # Constants
+ (r'(true|false|null)\b', Keyword.Constant),
+ # Imports
+ (r'(package|import)(\s+)(\S+)', bygroups(Keyword, Text, Name.Namespace)),
+ # Dot access
+ (r'(\?\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
+ (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation, Name.Attribute)),
+ # Annotations
+ (r'@[^\W\d][\w.]*', Name.Decorator),
+ # Labels
+ (r'[^\W\d][\w.]+@', Name.Decorator),
+ # Object expression
+ (r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Text, Punctuation, Text), 'class'),
+ # Types
+ (r'((?:(?:' + modifiers + r'|fun)\s+)*)(class|interface|object)(\s+)',
+ bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'class'),
+ # Variables
+ (r'(var|val)(\s+)(\()', bygroups(Keyword.Declaration, Text, Punctuation),
+ 'destructuring_assignment'),
+ (r'((?:(?:' + modifiers + r')\s+)*)(var|val)(\s+)',
+ bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'variable'),
+ # Functions
+ (r'((?:(?:' + modifiers + r')\s+)*)(fun)(\s+)',
+ bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'function'),
+ # Operators
(r'::|!!|\?[:.]', Operator),
- (r'[~^*!%&\[\]<>|+=/?-]', Operator),
- # Punctuation
- (r'[{}();:.,]', Punctuation),
- # Strings
- (r'"""', String, 'multiline_string'),
- (r'"', String, 'string'),
+ (r'[~^*!%&\[\]<>|+=/?-]', Operator),
+ # Punctuation
+ (r'[{}();:.,]', Punctuation),
+ # Strings
+ (r'"""', String, 'multiline_string'),
+ (r'"', String, 'string'),
(r"'\\.'|'[^\\]'", String.Char),
- # Numbers
+ # Numbers
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- # Identifiers
- (r'' + kt_id + r'((\?[^.])?)', Name) # additionally handle nullable types
+ # Identifiers
+ (r'' + kt_id + r'((\?[^.])?)', Name) # additionally handle nullable types
],
'class': [
(kt_id, Name.Class, '#pop')
],
- 'variable': [
- (kt_id, Name.Variable, '#pop')
+ 'variable': [
+ (kt_id, Name.Variable, '#pop')
],
- 'destructuring_assignment': [
- (r',', Punctuation),
- (r'\s+', Text),
- (kt_id, Name.Variable),
- (r'(:)(\s+)(' + kt_id + ')', bygroups(Punctuation, Text, Name)),
- (r'<', Operator, 'generic'),
- (r'\)', Punctuation, '#pop')
+ 'destructuring_assignment': [
+ (r',', Punctuation),
+ (r'\s+', Text),
+ (kt_id, Name.Variable),
+ (r'(:)(\s+)(' + kt_id + ')', bygroups(Punctuation, Text, Name)),
+ (r'<', Operator, 'generic'),
+ (r'\)', Punctuation, '#pop')
],
'function': [
- (r'<', Operator, 'generic'),
- (r'' + kt_id + r'(\.)' + kt_id, bygroups(Name, Punctuation, Name.Function), '#pop'),
+ (r'<', Operator, 'generic'),
+ (r'' + kt_id + r'(\.)' + kt_id, bygroups(Name, Punctuation, Name.Function), '#pop'),
(kt_id, Name.Function, '#pop')
],
'generic': [
- (r'(>)(\s*)', bygroups(Operator, Text), '#pop'),
- (r':', Punctuation),
+ (r'(>)(\s*)', bygroups(Operator, Text), '#pop'),
+ (r':', Punctuation),
(r'(reified|out|in)\b', Keyword),
- (r',', Punctuation),
- (r'\s+', Text),
- (kt_id, Name)
- ],
- 'modifiers': [
- (r'\w+', Keyword.Declaration),
- (r'\s+', Text),
- default('#pop')
- ],
- 'string': [
- (r'"', String, '#pop'),
- include('string_common')
- ],
- 'multiline_string': [
- (r'"""', String, '#pop'),
- (r'"', String),
- include('string_common')
- ],
- 'string_common': [
- (r'\\\\', String), # escaped backslash
- (r'\\"', String), # escaped quote
- (r'\\', String), # bare backslash
- (r'\$\{', String.Interpol, 'interpolation'),
- (r'(\$)(\w+)', bygroups(String.Interpol, Name)),
- (r'[^\\"$]+', String)
- ],
- 'interpolation': [
- (r'"', String),
- (r'\$\{', String.Interpol, 'interpolation'),
- (r'\{', Punctuation, 'scope'),
- (r'\}', String.Interpol, '#pop'),
- include('root')
- ],
- 'scope': [
- (r'\{', Punctuation, 'scope'),
- (r'\}', Punctuation, '#pop'),
- include('root')
+ (r',', Punctuation),
+ (r'\s+', Text),
+ (kt_id, Name)
+ ],
+ 'modifiers': [
+ (r'\w+', Keyword.Declaration),
+ (r'\s+', Text),
+ default('#pop')
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ include('string_common')
+ ],
+ 'multiline_string': [
+ (r'"""', String, '#pop'),
+ (r'"', String),
+ include('string_common')
+ ],
+ 'string_common': [
+ (r'\\\\', String), # escaped backslash
+ (r'\\"', String), # escaped quote
+ (r'\\', String), # bare backslash
+ (r'\$\{', String.Interpol, 'interpolation'),
+ (r'(\$)(\w+)', bygroups(String.Interpol, Name)),
+ (r'[^\\"$]+', String)
+ ],
+ 'interpolation': [
+ (r'"', String),
+ (r'\$\{', String.Interpol, 'interpolation'),
+ (r'\{', Punctuation, 'scope'),
+ (r'\}', String.Interpol, '#pop'),
+ include('root')
+ ],
+ 'scope': [
+ (r'\{', Punctuation, 'scope'),
+ (r'\}', Punctuation, '#pop'),
+ include('root')
]
}
@@ -1273,9 +1273,9 @@ class XtendLexer(RegexLexer):
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r"(''')", String, 'template'),
- (r'(\u00BB)', String, 'template'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'(\u00BB)', String, 'template'),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
@@ -1292,7 +1292,7 @@ class XtendLexer(RegexLexer):
],
'template': [
(r"'''", String, '#pop'),
- (r'\u00AB', String, '#pop'),
+ (r'\u00AB', String, '#pop'),
(r'.', String)
],
}
@@ -1739,63 +1739,63 @@ class JasminLexer(RegexLexer):
r'inner|interface|limit|set|signature|stack)\b', text,
re.MULTILINE):
score += 0.6
- return min(score, 1.0)
+ return min(score, 1.0)
class SarlLexer(RegexLexer):
- """
- For `SARL <http://www.sarl.io>`_ source code.
-
- .. versionadded:: 2.4
- """
-
- name = 'SARL'
- aliases = ['sarl']
- filenames = ['*.sarl']
- mimetypes = ['text/x-sarl']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(as|break|case|catch|default|do|else|extends|extension|finally|'
- r'fires|for|if|implements|instanceof|new|on|requires|return|super|'
- r'switch|throw|throws|try|typeof|uses|while|with)\b',
- Keyword),
- (r'(abstract|def|dispatch|final|native|override|private|protected|'
- r'public|static|strictfp|synchronized|transient|val|var|volatile)\b',
- Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant),
- (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|'
- r'interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
+ """
+ For `SARL <http://www.sarl.io>`_ source code.
+
+ .. versionadded:: 2.4
+ """
+
+ name = 'SARL'
+ aliases = ['sarl']
+ filenames = ['*.sarl']
+ mimetypes = ['text/x-sarl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_$][\w$]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(as|break|case|catch|default|do|else|extends|extension|finally|'
+ r'fires|for|if|implements|instanceof|new|on|requires|return|super|'
+ r'switch|throw|throws|try|typeof|uses|while|with)\b',
+ Keyword),
+ (r'(abstract|def|dispatch|final|native|override|private|protected|'
+ r'public|static|strictfp|synchronized|transient|val|var|volatile)\b',
+ Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant),
+ (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|'
+ r'interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
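
[editor's note, not part of the patch] A minimal usage sketch for the SarlLexer whose definition is restored in the hunk above. It assumes only what the hunk itself shows (the lexer is registered under the alias 'sarl'); the sample source string is a placeholder for illustration and is not taken from the repository.

    # Hedged sketch: drive the restored SarlLexer through the standard Pygments API.
    # The 'sarl' alias comes from the class definition above; the snippet is illustrative only.
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    sample = 'agent Pinger { on Initialize { println("ready") } }'  # placeholder SARL-style code
    print(highlight(sample, get_lexer_by_name('sarl'), TerminalFormatter()))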
diff --git a/contrib/python/Pygments/py3/pygments/lexers/kuin.py b/contrib/python/Pygments/py3/pygments/lexers/kuin.py
index 676efef172..588deee49c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/kuin.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/kuin.py
@@ -1,301 +1,301 @@
-"""
- pygments.lexers.kuin
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Kuin language.
+"""
+ pygments.lexers.kuin
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Kuin language.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
-"""
-
+"""
+
from pygments.lexer import RegexLexer, include, using, this, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, Number, Punctuation
-
-__all__ = ['KuinLexer']
-
-class KuinLexer(RegexLexer):
- """
- For `Kuin <https://github.com/kuina/Kuin>`_ source code
-
- .. versionadded:: 2.9
- """
- name = 'Kuin'
- aliases = ['kuin']
- filenames = ['*.kn']
-
- tokens = {
- 'root': [
- include('statement'),
- ],
- 'statement': [
- # Whitespace / Comment
- include('whitespace'),
-
- # Block-statement
- (r'(\+?[ \t]*\*?[ \t]*\bfunc)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Function), 'func_'),
- (r'\b(class)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Class), 'class_'),
- (r'\b(enum)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Constant), 'enum_'),
- (r'\b(block)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'block_'),
- (r'\b(ifdef)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'ifdef_'),
- (r'\b(if)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'if_'),
- (r'\b(switch)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'switch_'),
- (r'\b(while)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'while_'),
- (r'\b(for)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'for_'),
- (r'\b(foreach)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'foreach_'),
- (r'\b(try)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'try_'),
-
- # Line-statement
- (r'\b(do)\b', Keyword, 'do'),
- (r'(\+?[ \t]*\bvar)\b', Keyword, 'var'),
- (r'\b(const)\b', Keyword, 'const'),
- (r'\b(ret)\b', Keyword, 'ret'),
- (r'\b(throw)\b', Keyword, 'throw'),
- (r'\b(alias)\b', Keyword, 'alias'),
- (r'\b(assert)\b', Keyword, 'assert'),
- (r'\|', Text, 'continued_line'),
- (r'[ \t]*\n', Text),
- ],
-
- # Whitespace / Comment
- 'whitespace': [
- (r'^[ \t]*;.*', Comment.Single),
- (r'[ \t]+(?![; \t])', Text),
- (r'\{', Comment.Multiline, 'multiline_comment'),
- ],
- 'multiline_comment': [
- (r'\{', Comment.Multiline, 'multiline_comment'),
- (r'(?:\s*;.*|[^{}\n]+)', Comment.Multiline),
- (r'\n', Comment.Multiline),
- (r'\}', Comment.Multiline, '#pop'),
- ],
-
- # Block-statement
- 'func_': [
- include('expr'),
- (r'\n', Text, 'func'),
- ],
- 'func': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(func)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- ],
- 'class_': [
- include('expr'),
- (r'\n', Text, 'class'),
- ],
- 'class': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(class)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- ],
- 'enum_': [
- include('expr'),
- (r'\n', Text, 'enum'),
- ],
- 'enum': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(enum)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('expr'),
- (r'\n', Text),
- ],
- 'block_': [
- include('expr'),
- (r'\n', Text, 'block'),
- ],
- 'block': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(block)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'ifdef_': [
- include('expr'),
- (r'\n', Text, 'ifdef'),
- ],
- 'ifdef': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(ifdef)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('rls', 'dbg'), prefix=r'\b', suffix=r'\b'), Keyword.Constant, 'ifdef_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'ifdef_sp': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'if_': [
- include('expr'),
- (r'\n', Text, 'if'),
- ],
- 'if': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(if)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('elif', 'else'), prefix=r'\b', suffix=r'\b'), Keyword, 'if_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'if_sp': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'switch_': [
- include('expr'),
- (r'\n', Text, 'switch'),
- ],
- 'switch': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(switch)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('case', 'default', 'to'), prefix=r'\b', suffix=r'\b'), Keyword, 'switch_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'switch_sp': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'while_': [
- include('expr'),
- (r'\n', Text, 'while'),
- ],
- 'while': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(while)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'for_': [
- include('expr'),
- (r'\n', Text, 'for'),
- ],
- 'for': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(for)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'foreach_': [
- include('expr'),
- (r'\n', Text, 'foreach'),
- ],
- 'foreach': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(foreach)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'try_': [
- include('expr'),
- (r'\n', Text, 'try'),
- ],
- 'try': [
- (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(try)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
- (words(('catch', 'finally', 'to'), prefix=r'\b', suffix=r'\b'), Keyword, 'try_sp'),
- include('statement'),
- include('break'),
- include('skip'),
- ],
- 'try_sp': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
-
- # Line-statement
- 'break': [
- (r'\b(break)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Other)),
- ],
- 'skip': [
- (r'\b(skip)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Other)),
- ],
- 'alias': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'assert': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'const': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'do': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'ret': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'throw': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'var': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
- 'continued_line': [
- include('expr'),
- (r'\n', Text, '#pop'),
- ],
-
- 'expr': [
- # Whitespace / Comment
- include('whitespace'),
-
- # Punctuation
- (r'\(', Punctuation,),
- (r'\)', Punctuation,),
- (r'\[', Punctuation,),
- (r'\]', Punctuation,),
- (r',', Punctuation),
-
- # Keyword
- (words((
- 'true', 'false', 'null', 'inf'
- ), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
- (words((
- 'me'
- ), prefix=r'\b', suffix=r'\b'), Keyword),
- (words((
- 'bit16', 'bit32', 'bit64', 'bit8', 'bool',
- 'char', 'class', 'dict', 'enum', 'float', 'func',
- 'int', 'list', 'queue', 'stack'
- ), prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- # Number
- (r'\b[0-9]\.[0-9]+(?!\.)(:?e[\+-][0-9]+)?\b', Number.Float),
- (r'\b2#[01]+(?:b(?:8|16|32|64))?\b', Number.Bin),
- (r'\b8#[0-7]+(?:b(?:8|16|32|64))?\b', Number.Oct),
- (r'\b16#[0-9A-F]+(?:b(?:8|16|32|64))?\b', Number.Hex),
- (r'\b[0-9]+(?:b(?:8|16|32|64))?\b', Number.Decimal),
-
- # String / Char
- (r'"', String.Double, 'string'),
- (r"'(?:\\.|.)+?'", String.Char),
-
- # Operator
- (r'(?:\.|\$(?:>|<)?)', Operator),
- (r'(?:\^)', Operator),
- (r'(?:\+|-|!|##?)', Operator),
- (r'(?:\*|/|%)', Operator),
- (r'(?:~)', Operator),
- (r'(?:(?:=|<>)(?:&|\$)?|<=?|>=?)', Operator),
- (r'(?:&)', Operator),
- (r'(?:\|)', Operator),
- (r'(?:\?)', Operator),
- (r'(?::(?::|\+|-|\*|/|%|\^|~)?)', Operator),
-
- # Identifier
- (r"\b([a-zA-Z_][0-9a-zA-Z_]*)(?=@)\b", Name),
- (r"(@)?\b([a-zA-Z_][0-9a-zA-Z_]*)\b", bygroups(Name.Other, Name.Variable)),
- ],
-
- # String
- 'string': [
- (r'(?:\\[^{\n]|[^"\\])+', String.Double),
- (r'\\\{', String.Double, 'toStrInString'),
- (r'"', String.Double, '#pop'),
- ],
- 'toStrInString': [
- include('expr'),
- (r'\}', String.Double, '#pop'),
- ],
- }
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, Number, Punctuation
+
+__all__ = ['KuinLexer']
+
+class KuinLexer(RegexLexer):
+ """
+ For `Kuin <https://github.com/kuina/Kuin>`_ source code
+
+ .. versionadded:: 2.9
+ """
+ name = 'Kuin'
+ aliases = ['kuin']
+ filenames = ['*.kn']
+
+ tokens = {
+ 'root': [
+ include('statement'),
+ ],
+ 'statement': [
+ # Whitespace / Comment
+ include('whitespace'),
+
+ # Block-statement
+ (r'(\+?[ \t]*\*?[ \t]*\bfunc)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Function), 'func_'),
+ (r'\b(class)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Class), 'class_'),
+ (r'\b(enum)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Constant), 'enum_'),
+ (r'\b(block)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'block_'),
+ (r'\b(ifdef)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'ifdef_'),
+ (r'\b(if)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'if_'),
+ (r'\b(switch)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'switch_'),
+ (r'\b(while)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'while_'),
+ (r'\b(for)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'for_'),
+ (r'\b(foreach)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'foreach_'),
+ (r'\b(try)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'try_'),
+
+ # Line-statement
+ (r'\b(do)\b', Keyword, 'do'),
+ (r'(\+?[ \t]*\bvar)\b', Keyword, 'var'),
+ (r'\b(const)\b', Keyword, 'const'),
+ (r'\b(ret)\b', Keyword, 'ret'),
+ (r'\b(throw)\b', Keyword, 'throw'),
+ (r'\b(alias)\b', Keyword, 'alias'),
+ (r'\b(assert)\b', Keyword, 'assert'),
+ (r'\|', Text, 'continued_line'),
+ (r'[ \t]*\n', Text),
+ ],
+
+ # Whitespace / Comment
+ 'whitespace': [
+ (r'^[ \t]*;.*', Comment.Single),
+ (r'[ \t]+(?![; \t])', Text),
+ (r'\{', Comment.Multiline, 'multiline_comment'),
+ ],
+ 'multiline_comment': [
+ (r'\{', Comment.Multiline, 'multiline_comment'),
+ (r'(?:\s*;.*|[^{}\n]+)', Comment.Multiline),
+ (r'\n', Comment.Multiline),
+ (r'\}', Comment.Multiline, '#pop'),
+ ],
+
+ # Block-statement
+ 'func_': [
+ include('expr'),
+ (r'\n', Text, 'func'),
+ ],
+ 'func': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(func)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ ],
+ 'class_': [
+ include('expr'),
+ (r'\n', Text, 'class'),
+ ],
+ 'class': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(class)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ ],
+ 'enum_': [
+ include('expr'),
+ (r'\n', Text, 'enum'),
+ ],
+ 'enum': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(enum)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('expr'),
+ (r'\n', Text),
+ ],
+ 'block_': [
+ include('expr'),
+ (r'\n', Text, 'block'),
+ ],
+ 'block': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(block)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'ifdef_': [
+ include('expr'),
+ (r'\n', Text, 'ifdef'),
+ ],
+ 'ifdef': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(ifdef)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('rls', 'dbg'), prefix=r'\b', suffix=r'\b'), Keyword.Constant, 'ifdef_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'ifdef_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'if_': [
+ include('expr'),
+ (r'\n', Text, 'if'),
+ ],
+ 'if': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(if)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('elif', 'else'), prefix=r'\b', suffix=r'\b'), Keyword, 'if_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'if_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'switch_': [
+ include('expr'),
+ (r'\n', Text, 'switch'),
+ ],
+ 'switch': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(switch)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('case', 'default', 'to'), prefix=r'\b', suffix=r'\b'), Keyword, 'switch_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'switch_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'while_': [
+ include('expr'),
+ (r'\n', Text, 'while'),
+ ],
+ 'while': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(while)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'for_': [
+ include('expr'),
+ (r'\n', Text, 'for'),
+ ],
+ 'for': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(for)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'foreach_': [
+ include('expr'),
+ (r'\n', Text, 'foreach'),
+ ],
+ 'foreach': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(foreach)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'try_': [
+ include('expr'),
+ (r'\n', Text, 'try'),
+ ],
+ 'try': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(try)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('catch', 'finally', 'to'), prefix=r'\b', suffix=r'\b'), Keyword, 'try_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'try_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+
+ # Line-statement
+ 'break': [
+ (r'\b(break)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Other)),
+ ],
+ 'skip': [
+ (r'\b(skip)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Other)),
+ ],
+ 'alias': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'assert': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'const': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'do': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'ret': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'throw': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'var': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'continued_line': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+
+ 'expr': [
+ # Whitespace / Comment
+ include('whitespace'),
+
+ # Punctuation
+ (r'\(', Punctuation,),
+ (r'\)', Punctuation,),
+ (r'\[', Punctuation,),
+ (r'\]', Punctuation,),
+ (r',', Punctuation),
+
+ # Keyword
+ (words((
+ 'true', 'false', 'null', 'inf'
+ ), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
+ (words((
+ 'me'
+ ), prefix=r'\b', suffix=r'\b'), Keyword),
+ (words((
+ 'bit16', 'bit32', 'bit64', 'bit8', 'bool',
+ 'char', 'class', 'dict', 'enum', 'float', 'func',
+ 'int', 'list', 'queue', 'stack'
+ ), prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ # Number
+ (r'\b[0-9]\.[0-9]+(?!\.)(:?e[\+-][0-9]+)?\b', Number.Float),
+ (r'\b2#[01]+(?:b(?:8|16|32|64))?\b', Number.Bin),
+ (r'\b8#[0-7]+(?:b(?:8|16|32|64))?\b', Number.Oct),
+ (r'\b16#[0-9A-F]+(?:b(?:8|16|32|64))?\b', Number.Hex),
+ (r'\b[0-9]+(?:b(?:8|16|32|64))?\b', Number.Decimal),
+
+ # String / Char
+ (r'"', String.Double, 'string'),
+ (r"'(?:\\.|.)+?'", String.Char),
+
+ # Operator
+ (r'(?:\.|\$(?:>|<)?)', Operator),
+ (r'(?:\^)', Operator),
+ (r'(?:\+|-|!|##?)', Operator),
+ (r'(?:\*|/|%)', Operator),
+ (r'(?:~)', Operator),
+ (r'(?:(?:=|<>)(?:&|\$)?|<=?|>=?)', Operator),
+ (r'(?:&)', Operator),
+ (r'(?:\|)', Operator),
+ (r'(?:\?)', Operator),
+ (r'(?::(?::|\+|-|\*|/|%|\^|~)?)', Operator),
+
+ # Identifier
+ (r"\b([a-zA-Z_][0-9a-zA-Z_]*)(?=@)\b", Name),
+ (r"(@)?\b([a-zA-Z_][0-9a-zA-Z_]*)\b", bygroups(Name.Other, Name.Variable)),
+ ],
+
+ # String
+ 'string': [
+ (r'(?:\\[^{\n]|[^"\\])+', String.Double),
+ (r'\\\{', String.Double, 'toStrInString'),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'toStrInString': [
+ include('expr'),
+ (r'\}', String.Double, '#pop'),
+ ],
+ }
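
[editor's note, not part of the patch] As with the SARL hunk, a hedged sketch of how the KuinLexer restored above could be exercised. The 'kuin' alias and the '*.kn' filename pattern come from the class definition in the hunk; the source string below is a placeholder, not verified Kuin code.

    # Hedged sketch: highlight a placeholder snippet with the lexer registered as 'kuin'.
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    source = 'func main()\n  do @print("hello")\nend func\n'  # placeholder, illustrative only
    print(highlight(source, get_lexer_by_name('kuin'), HtmlFormatter()))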
diff --git a/contrib/python/Pygments/py3/pygments/lexers/lisp.py b/contrib/python/Pygments/py3/pygments/lexers/lisp.py
index 5628e336ca..bef3727fe0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/lisp.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/lisp.py
@@ -4,7 +4,7 @@
Lexers for Lispy languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -407,7 +407,7 @@ class HyLexer(RegexLexer):
# valid names for identifiers
# well, names can only not consist fully of numbers
# but this should be good enough for now
- valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
def _multi_escape(entries):
return words(entries, suffix=' ')
@@ -428,7 +428,7 @@ class HyLexer(RegexLexer):
(r'0[xX][a-fA-F0-9]+', Number.Hex),
# strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'" + valid_name, String.Symbol),
(r"\\(.|[a-z]+)", String.Char),
(r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
@@ -496,779 +496,779 @@ class RacketLexer(RegexLexer):
# Generated by example.rkt
_keywords = (
- '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
- '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
- '#%printing-module-begin', '#%provide', '#%require',
- '#%stratified-body', '#%top', '#%top-interaction',
- '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
- '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
- 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
- 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
- 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
- 'case-lambda', 'class', 'class*', 'class-field-accessor',
- 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
- 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
- 'cond', 'cons/dc', 'contract', 'contract-out', 'contract-struct',
- 'contracted', 'define', 'define-compound-unit',
- 'define-compound-unit/infer', 'define-contract-struct',
- 'define-custom-hash-types', 'define-custom-set-types',
- 'define-for-syntax', 'define-local-member-name', 'define-logger',
- 'define-match-expander', 'define-member-name',
- 'define-module-boundary-contract', 'define-namespace-anchor',
- 'define-opt/c', 'define-sequence-syntax', 'define-serializable-class',
- 'define-serializable-class*', 'define-signature',
- 'define-signature-form', 'define-struct', 'define-struct/contract',
- 'define-struct/derived', 'define-syntax', 'define-syntax-rule',
- 'define-syntaxes', 'define-unit', 'define-unit-binding',
- 'define-unit-from-context', 'define-unit/contract',
- 'define-unit/new-import-export', 'define-unit/s', 'define-values',
- 'define-values-for-export', 'define-values-for-syntax',
- 'define-values/invoke-unit', 'define-values/invoke-unit/infer',
- 'define/augment', 'define/augment-final', 'define/augride',
- 'define/contract', 'define/final-prop', 'define/match',
- 'define/overment', 'define/override', 'define/override-final',
- 'define/private', 'define/public', 'define/public-final',
- 'define/pubment', 'define/subexpression-pos-prop',
- 'define/subexpression-pos-prop/name', 'delay', 'delay/idle',
- 'delay/name', 'delay/strict', 'delay/sync', 'delay/thread', 'do',
- 'else', 'except', 'except-in', 'except-out', 'export', 'extends',
- 'failure-cont', 'false', 'false/c', 'field', 'field-bound?', 'file',
- 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
- 'for*/async', 'for*/first', 'for*/fold', 'for*/fold/derived',
- 'for*/hash', 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list',
- 'for*/lists', 'for*/mutable-set', 'for*/mutable-seteq',
- 'for*/mutable-seteqv', 'for*/or', 'for*/product', 'for*/set',
- 'for*/seteq', 'for*/seteqv', 'for*/stream', 'for*/sum', 'for*/vector',
- 'for*/weak-set', 'for*/weak-seteq', 'for*/weak-seteqv', 'for-label',
- 'for-meta', 'for-syntax', 'for-template', 'for/and', 'for/async',
- 'for/first', 'for/fold', 'for/fold/derived', 'for/hash', 'for/hasheq',
- 'for/hasheqv', 'for/last', 'for/list', 'for/lists', 'for/mutable-set',
- 'for/mutable-seteq', 'for/mutable-seteqv', 'for/or', 'for/product',
- 'for/set', 'for/seteq', 'for/seteqv', 'for/stream', 'for/sum',
- 'for/vector', 'for/weak-set', 'for/weak-seteq', 'for/weak-seteqv',
- 'gen:custom-write', 'gen:dict', 'gen:equal+hash', 'gen:set',
- 'gen:stream', 'generic', 'get-field', 'hash/dc', 'if', 'implies',
- 'import', 'include', 'include-at/relative-to',
- 'include-at/relative-to/reader', 'include/reader', 'inherit',
- 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
- 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
- 'instantiate', 'interface', 'interface*', 'invariant-assertion',
- 'invoke-unit', 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*',
- 'let*-values', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc',
- 'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes',
- 'letrec-syntaxes+values', 'letrec-values', 'lib', 'link', 'local',
- 'local-require', 'log-debug', 'log-error', 'log-fatal', 'log-info',
- 'log-warning', 'match', 'match*', 'match*/derived', 'match-define',
- 'match-define-values', 'match-lambda', 'match-lambda*',
- 'match-lambda**', 'match-let', 'match-let*', 'match-let*-values',
- 'match-let-values', 'match-letrec', 'match-letrec-values',
- 'match/derived', 'match/values', 'member-name-key', 'mixin', 'module',
- 'module*', 'module+', 'nand', 'new', 'nor', 'object-contract',
- 'object/c', 'only', 'only-in', 'only-meta-in', 'open', 'opt/c', 'or',
- 'overment', 'overment*', 'override', 'override*', 'override-final',
- 'override-final*', 'parameterize', 'parameterize*',
- 'parameterize-break', 'parametric->/c', 'place', 'place*',
- 'place/context', 'planet', 'prefix', 'prefix-in', 'prefix-out',
- 'private', 'private*', 'prompt-tag/c', 'protect-out', 'provide',
- 'provide-signature-elements', 'provide/contract', 'public', 'public*',
- 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
- 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
- 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
- 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
- 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
- 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
- 'set-field!', 'shared', 'stream', 'stream*', 'stream-cons', 'struct',
- 'struct*', 'struct-copy', 'struct-field-index', 'struct-out',
- 'struct/c', 'struct/ctc', 'struct/dc', 'submod', 'super',
- 'super-instantiate', 'super-make-object', 'super-new', 'syntax',
- 'syntax-case', 'syntax-case*', 'syntax-id-rules', 'syntax-rules',
- 'syntax/loc', 'tag', 'this', 'this%', 'thunk', 'thunk*', 'time',
- 'unconstrained-domain->', 'unit', 'unit-from-context', 'unit/c',
- 'unit/new-import-export', 'unit/s', 'unless', 'unquote',
- 'unquote-splicing', 'unsyntax', 'unsyntax-splicing', 'values/drop',
- 'when', 'with-continuation-mark', 'with-contract',
- 'with-contract-continuation-mark', 'with-handlers', 'with-handlers*',
- 'with-method', 'with-syntax', 'λ'
+ '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
+ '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
+ '#%printing-module-begin', '#%provide', '#%require',
+ '#%stratified-body', '#%top', '#%top-interaction',
+ '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
+ '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
+ 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
+ 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
+ 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
+ 'case-lambda', 'class', 'class*', 'class-field-accessor',
+ 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
+ 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
+ 'cond', 'cons/dc', 'contract', 'contract-out', 'contract-struct',
+ 'contracted', 'define', 'define-compound-unit',
+ 'define-compound-unit/infer', 'define-contract-struct',
+ 'define-custom-hash-types', 'define-custom-set-types',
+ 'define-for-syntax', 'define-local-member-name', 'define-logger',
+ 'define-match-expander', 'define-member-name',
+ 'define-module-boundary-contract', 'define-namespace-anchor',
+ 'define-opt/c', 'define-sequence-syntax', 'define-serializable-class',
+ 'define-serializable-class*', 'define-signature',
+ 'define-signature-form', 'define-struct', 'define-struct/contract',
+ 'define-struct/derived', 'define-syntax', 'define-syntax-rule',
+ 'define-syntaxes', 'define-unit', 'define-unit-binding',
+ 'define-unit-from-context', 'define-unit/contract',
+ 'define-unit/new-import-export', 'define-unit/s', 'define-values',
+ 'define-values-for-export', 'define-values-for-syntax',
+ 'define-values/invoke-unit', 'define-values/invoke-unit/infer',
+ 'define/augment', 'define/augment-final', 'define/augride',
+ 'define/contract', 'define/final-prop', 'define/match',
+ 'define/overment', 'define/override', 'define/override-final',
+ 'define/private', 'define/public', 'define/public-final',
+ 'define/pubment', 'define/subexpression-pos-prop',
+ 'define/subexpression-pos-prop/name', 'delay', 'delay/idle',
+ 'delay/name', 'delay/strict', 'delay/sync', 'delay/thread', 'do',
+ 'else', 'except', 'except-in', 'except-out', 'export', 'extends',
+ 'failure-cont', 'false', 'false/c', 'field', 'field-bound?', 'file',
+ 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
+ 'for*/async', 'for*/first', 'for*/fold', 'for*/fold/derived',
+ 'for*/hash', 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list',
+ 'for*/lists', 'for*/mutable-set', 'for*/mutable-seteq',
+ 'for*/mutable-seteqv', 'for*/or', 'for*/product', 'for*/set',
+ 'for*/seteq', 'for*/seteqv', 'for*/stream', 'for*/sum', 'for*/vector',
+ 'for*/weak-set', 'for*/weak-seteq', 'for*/weak-seteqv', 'for-label',
+ 'for-meta', 'for-syntax', 'for-template', 'for/and', 'for/async',
+ 'for/first', 'for/fold', 'for/fold/derived', 'for/hash', 'for/hasheq',
+ 'for/hasheqv', 'for/last', 'for/list', 'for/lists', 'for/mutable-set',
+ 'for/mutable-seteq', 'for/mutable-seteqv', 'for/or', 'for/product',
+ 'for/set', 'for/seteq', 'for/seteqv', 'for/stream', 'for/sum',
+ 'for/vector', 'for/weak-set', 'for/weak-seteq', 'for/weak-seteqv',
+ 'gen:custom-write', 'gen:dict', 'gen:equal+hash', 'gen:set',
+ 'gen:stream', 'generic', 'get-field', 'hash/dc', 'if', 'implies',
+ 'import', 'include', 'include-at/relative-to',
+ 'include-at/relative-to/reader', 'include/reader', 'inherit',
+ 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
+ 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
+ 'instantiate', 'interface', 'interface*', 'invariant-assertion',
+ 'invoke-unit', 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*',
+ 'let*-values', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc',
+ 'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes',
+ 'letrec-syntaxes+values', 'letrec-values', 'lib', 'link', 'local',
+ 'local-require', 'log-debug', 'log-error', 'log-fatal', 'log-info',
+ 'log-warning', 'match', 'match*', 'match*/derived', 'match-define',
+ 'match-define-values', 'match-lambda', 'match-lambda*',
+ 'match-lambda**', 'match-let', 'match-let*', 'match-let*-values',
+ 'match-let-values', 'match-letrec', 'match-letrec-values',
+ 'match/derived', 'match/values', 'member-name-key', 'mixin', 'module',
+ 'module*', 'module+', 'nand', 'new', 'nor', 'object-contract',
+ 'object/c', 'only', 'only-in', 'only-meta-in', 'open', 'opt/c', 'or',
+ 'overment', 'overment*', 'override', 'override*', 'override-final',
+ 'override-final*', 'parameterize', 'parameterize*',
+ 'parameterize-break', 'parametric->/c', 'place', 'place*',
+ 'place/context', 'planet', 'prefix', 'prefix-in', 'prefix-out',
+ 'private', 'private*', 'prompt-tag/c', 'protect-out', 'provide',
+ 'provide-signature-elements', 'provide/contract', 'public', 'public*',
+ 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
+ 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
+ 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
+ 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
+ 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
+ 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
+ 'set-field!', 'shared', 'stream', 'stream*', 'stream-cons', 'struct',
+ 'struct*', 'struct-copy', 'struct-field-index', 'struct-out',
+ 'struct/c', 'struct/ctc', 'struct/dc', 'submod', 'super',
+ 'super-instantiate', 'super-make-object', 'super-new', 'syntax',
+ 'syntax-case', 'syntax-case*', 'syntax-id-rules', 'syntax-rules',
+ 'syntax/loc', 'tag', 'this', 'this%', 'thunk', 'thunk*', 'time',
+ 'unconstrained-domain->', 'unit', 'unit-from-context', 'unit/c',
+ 'unit/new-import-export', 'unit/s', 'unless', 'unquote',
+ 'unquote-splicing', 'unsyntax', 'unsyntax-splicing', 'values/drop',
+ 'when', 'with-continuation-mark', 'with-contract',
+ 'with-contract-continuation-mark', 'with-handlers', 'with-handlers*',
+ 'with-method', 'with-syntax', 'λ'
)
# Generated by example.rkt
_builtins = (
- '*', '*list/c', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c',
- '>', '>/c', '>=', '>=/c', 'abort-current-continuation', 'abs',
- 'absolute-path?', 'acos', 'add-between', 'add1', 'alarm-evt',
- 'always-evt', 'and/c', 'andmap', 'angle', 'any/c', 'append', 'append*',
- 'append-map', 'apply', 'argmax', 'argmin', 'arithmetic-shift',
- 'arity-at-least', 'arity-at-least-value', 'arity-at-least?',
- 'arity-checking-wrapper', 'arity-includes?', 'arity=?',
- 'arrow-contract-info', 'arrow-contract-info-accepts-arglist',
- 'arrow-contract-info-chaperone-procedure',
- 'arrow-contract-info-check-first-order', 'arrow-contract-info?',
- 'asin', 'assf', 'assoc', 'assq', 'assv', 'atan',
- 'bad-number-of-results', 'banner', 'base->-doms/c', 'base->-rngs/c',
- 'base->?', 'between/c', 'bitwise-and', 'bitwise-bit-field',
- 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor',
- 'blame-add-car-context', 'blame-add-cdr-context', 'blame-add-context',
- 'blame-add-missing-party', 'blame-add-nth-arg-context',
- 'blame-add-range-context', 'blame-add-unknown-context',
- 'blame-context', 'blame-contract', 'blame-fmt->-string',
- 'blame-missing-party?', 'blame-negative', 'blame-original?',
- 'blame-positive', 'blame-replace-negative', 'blame-source',
- 'blame-swap', 'blame-swapped?', 'blame-update', 'blame-value',
- 'blame?', 'boolean=?', 'boolean?', 'bound-identifier=?', 'box',
- 'box-cas!', 'box-immutable', 'box-immutable/c', 'box/c', 'box?',
- 'break-enabled', 'break-parameterization?', 'break-thread',
- 'build-chaperone-contract-property', 'build-compound-type-name',
- 'build-contract-property', 'build-flat-contract-property',
- 'build-list', 'build-path', 'build-path/convention-type',
- 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
- 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
- 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
- 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
- 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
- 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
- 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
- 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
- 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
- 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
- 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
- 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
- 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
- 'call-in-nested-thread', 'call-with-atomic-output-file',
- 'call-with-break-parameterization',
- 'call-with-composable-continuation', 'call-with-continuation-barrier',
- 'call-with-continuation-prompt', 'call-with-current-continuation',
- 'call-with-default-reading-parameterization',
- 'call-with-escape-continuation', 'call-with-exception-handler',
- 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
- 'call-with-input-bytes', 'call-with-input-file',
- 'call-with-input-file*', 'call-with-input-string',
- 'call-with-output-bytes', 'call-with-output-file',
- 'call-with-output-file*', 'call-with-output-string',
- 'call-with-parameterization', 'call-with-semaphore',
- 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
- 'call/ec', 'car', 'cartesian-product', 'cdaaar', 'cdaadr', 'cdaar',
- 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar',
- 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get',
- 'channel-put', 'channel-put-evt', 'channel-put-evt?',
- 'channel-try-get', 'channel/c', 'channel?', 'chaperone-box',
- 'chaperone-channel', 'chaperone-continuation-mark-key',
- 'chaperone-contract-property?', 'chaperone-contract?', 'chaperone-evt',
- 'chaperone-hash', 'chaperone-hash-set', 'chaperone-of?',
- 'chaperone-procedure', 'chaperone-procedure*', 'chaperone-prompt-tag',
- 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
- 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
- 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
- 'char-downcase', 'char-foldcase', 'char-general-category',
- 'char-graphic?', 'char-in', 'char-in/c', 'char-iso-control?',
- 'char-lower-case?', 'char-numeric?', 'char-punctuation?',
- 'char-ready?', 'char-symbolic?', 'char-title-case?', 'char-titlecase',
- 'char-upcase', 'char-upper-case?', 'char-utf-8-length',
- 'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?',
- 'char?', 'check-duplicate-identifier', 'check-duplicates',
- 'checked-procedure-check-and-extract', 'choice-evt',
- 'class->interface', 'class-info', 'class-seal', 'class-unseal',
- 'class?', 'cleanse-path', 'close-input-port', 'close-output-port',
- 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
- 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
- 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
- 'collection-file-path', 'collection-path', 'combinations', 'compile',
- 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
- 'compile-enforce-module-constants', 'compile-syntax',
- 'compiled-expression-recompile', 'compiled-expression?',
- 'compiled-module-expression?', 'complete-path?', 'complex?', 'compose',
- 'compose1', 'conjoin', 'conjugate', 'cons', 'cons/c', 'cons?', 'const',
- 'continuation-mark-key/c', 'continuation-mark-key?',
- 'continuation-mark-set->context', 'continuation-mark-set->list',
- 'continuation-mark-set->list*', 'continuation-mark-set-first',
- 'continuation-mark-set?', 'continuation-marks',
- 'continuation-prompt-available?', 'continuation-prompt-tag?',
- 'continuation?', 'contract-continuation-mark-key',
- 'contract-custom-write-property-proc', 'contract-exercise',
- 'contract-first-order', 'contract-first-order-passes?',
- 'contract-late-neg-projection', 'contract-name', 'contract-proc',
- 'contract-projection', 'contract-property?',
- 'contract-random-generate', 'contract-random-generate-fail',
- 'contract-random-generate-fail?',
- 'contract-random-generate-get-current-environment',
- 'contract-random-generate-stash', 'contract-random-generate/choose',
- 'contract-stronger?', 'contract-struct-exercise',
- 'contract-struct-generate', 'contract-struct-late-neg-projection',
- 'contract-struct-list-contract?', 'contract-val-first-projection',
- 'contract?', 'convert-stream', 'copy-directory/files', 'copy-file',
- 'copy-port', 'cos', 'cosh', 'count', 'current-blame-format',
- 'current-break-parameterization', 'current-code-inspector',
- 'current-command-line-arguments', 'current-compile',
- 'current-compiled-file-roots', 'current-continuation-marks',
- 'current-contract-region', 'current-custodian', 'current-directory',
- 'current-directory-for-user', 'current-drive',
- 'current-environment-variables', 'current-error-port', 'current-eval',
- 'current-evt-pseudo-random-generator',
- 'current-force-delete-permissions', 'current-future',
- 'current-gc-milliseconds', 'current-get-interaction-input-port',
- 'current-inexact-milliseconds', 'current-input-port',
- 'current-inspector', 'current-library-collection-links',
- 'current-library-collection-paths', 'current-load',
- 'current-load-extension', 'current-load-relative-directory',
- 'current-load/use-compiled', 'current-locale', 'current-logger',
- 'current-memory-use', 'current-milliseconds',
- 'current-module-declare-name', 'current-module-declare-source',
- 'current-module-name-resolver', 'current-module-path-for-load',
- 'current-namespace', 'current-output-port', 'current-parameterization',
- 'current-plumber', 'current-preserved-thread-cell-values',
- 'current-print', 'current-process-milliseconds', 'current-prompt-read',
- 'current-pseudo-random-generator', 'current-read-interaction',
- 'current-reader-guard', 'current-readtable', 'current-seconds',
- 'current-security-guard', 'current-subprocess-custodian-mode',
- 'current-thread', 'current-thread-group',
- 'current-thread-initial-stack-size',
- 'current-write-relative-directory', 'curry', 'curryr',
- 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
- 'custodian-managed-list', 'custodian-memory-accounting-available?',
- 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
- 'custom-print-quotable-accessor', 'custom-print-quotable?',
- 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
- 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
- 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
- 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
- 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
- 'default-continuation-prompt-tag', 'degrees->radians',
- 'delete-directory', 'delete-directory/files', 'delete-file',
- 'denominator', 'dict->list', 'dict-can-functional-set?',
- 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
- 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
- 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
- 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
- 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
- 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
- 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
- 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
- 'dict?', 'directory-exists?', 'directory-list', 'disjoin', 'display',
- 'display-lines', 'display-lines-to-file', 'display-to-file',
- 'displayln', 'double-flonum?', 'drop', 'drop-common-prefix',
- 'drop-right', 'dropf', 'dropf-right', 'dump-memory-stats',
- 'dup-input-port', 'dup-output-port', 'dynamic->*', 'dynamic-get-field',
- 'dynamic-object/c', 'dynamic-place', 'dynamic-place*',
- 'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-send',
- 'dynamic-set-field!', 'dynamic-wind', 'eighth', 'empty',
- 'empty-sequence', 'empty-stream', 'empty?',
- 'environment-variables-copy', 'environment-variables-names',
- 'environment-variables-ref', 'environment-variables-set!',
- 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
- 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
- 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
- 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
- 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
- 'error-display-handler', 'error-escape-handler',
- 'error-print-context-length', 'error-print-source-location',
- 'error-print-width', 'error-value->string-handler', 'eval',
- 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
- 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
- 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
- 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
- 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
- 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
- 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
- 'exn:break?', 'exn:fail', 'exn:fail:contract',
- 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
- 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
- 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
- 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
- 'exn:fail:contract:divide-by-zero?',
- 'exn:fail:contract:non-fixnum-result',
- 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
- 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
- 'exn:fail:contract?', 'exn:fail:filesystem',
- 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
- 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
- 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
- 'exn:fail:filesystem:missing-module-path',
- 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
- 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
- 'exn:fail:network', 'exn:fail:network:errno',
- 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
- 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
- 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
- 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
- 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
- 'exn:fail:syntax', 'exn:fail:syntax-exprs',
- 'exn:fail:syntax:missing-module',
- 'exn:fail:syntax:missing-module-path',
- 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
- 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
- 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
- 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
- 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
- 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
- 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
- 'explode-path', 'expt', 'externalizable<%>', 'failure-result/c',
- 'false?', 'field-names', 'fifth', 'file->bytes', 'file->bytes-lines',
- 'file->lines', 'file->list', 'file->string', 'file->value',
- 'file-exists?', 'file-name-from-path', 'file-or-directory-identity',
- 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
- 'file-position', 'file-position*', 'file-size',
- 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
- 'filename-extension', 'filesystem-change-evt',
- 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
- 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
- 'filter-read-input-port', 'find-executable-path', 'find-files',
- 'find-library-collection-links', 'find-library-collection-paths',
- 'find-relative-path', 'find-system-path', 'findf', 'first',
- 'first-or/c', 'fixnum?', 'flat-contract', 'flat-contract-predicate',
- 'flat-contract-property?', 'flat-contract?', 'flat-named-contract',
- 'flatten', 'floating-point-bytes->real', 'flonum?', 'floor',
- 'flush-output', 'fold-files', 'foldl', 'foldr', 'for-each', 'force',
- 'format', 'fourth', 'fprintf', 'free-identifier=?',
- 'free-label-identifier=?', 'free-template-identifier=?',
- 'free-transformer-identifier=?', 'fsemaphore-count', 'fsemaphore-post',
- 'fsemaphore-try-wait?', 'fsemaphore-wait', 'fsemaphore?', 'future',
- 'future?', 'futures-enabled?', 'gcd', 'generate-member-key',
- 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
- 'get-output-bytes', 'get-output-string', 'get-preference',
- 'get/build-late-neg-projection', 'get/build-val-first-projection',
- 'getenv', 'global-port-print-handler', 'group-by', 'group-execute-bit',
- 'group-read-bit', 'group-write-bit', 'guard-evt', 'handle-evt',
- 'handle-evt?', 'has-blame?', 'has-contract?', 'hash', 'hash->list',
- 'hash-clear', 'hash-clear!', 'hash-copy', 'hash-copy-clear',
- 'hash-count', 'hash-empty?', 'hash-eq?', 'hash-equal?', 'hash-eqv?',
- 'hash-for-each', 'hash-has-key?', 'hash-iterate-first',
- 'hash-iterate-key', 'hash-iterate-key+value', 'hash-iterate-next',
- 'hash-iterate-pair', 'hash-iterate-value', 'hash-keys', 'hash-map',
- 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
- 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
- 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
- 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
- 'identifier-binding-symbol', 'identifier-label-binding',
- 'identifier-prune-lexical-context',
- 'identifier-prune-to-source-module',
- 'identifier-remove-from-definition-context',
- 'identifier-template-binding', 'identifier-transformer-binding',
- 'identifier?', 'identity', 'if/c', 'imag-part', 'immutable?',
- 'impersonate-box', 'impersonate-channel',
- 'impersonate-continuation-mark-key', 'impersonate-hash',
- 'impersonate-hash-set', 'impersonate-procedure',
- 'impersonate-procedure*', 'impersonate-prompt-tag',
- 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
- 'impersonator-ephemeron', 'impersonator-of?',
- 'impersonator-prop:application-mark', 'impersonator-prop:blame',
- 'impersonator-prop:contracted',
- 'impersonator-property-accessor-procedure?', 'impersonator-property?',
- 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
- 'in-bytes-lines', 'in-combinations', 'in-cycle', 'in-dict',
- 'in-dict-keys', 'in-dict-pairs', 'in-dict-values', 'in-directory',
- 'in-hash', 'in-hash-keys', 'in-hash-pairs', 'in-hash-values',
- 'in-immutable-hash', 'in-immutable-hash-keys',
- 'in-immutable-hash-pairs', 'in-immutable-hash-values',
- 'in-immutable-set', 'in-indexed', 'in-input-port-bytes',
- 'in-input-port-chars', 'in-lines', 'in-list', 'in-mlist',
- 'in-mutable-hash', 'in-mutable-hash-keys', 'in-mutable-hash-pairs',
- 'in-mutable-hash-values', 'in-mutable-set', 'in-naturals',
- 'in-parallel', 'in-permutations', 'in-port', 'in-producer', 'in-range',
- 'in-sequences', 'in-set', 'in-slice', 'in-stream', 'in-string',
- 'in-syntax', 'in-value', 'in-values*-sequence', 'in-values-sequence',
- 'in-vector', 'in-weak-hash', 'in-weak-hash-keys', 'in-weak-hash-pairs',
- 'in-weak-hash-values', 'in-weak-set', 'inexact->exact',
- 'inexact-real?', 'inexact?', 'infinite?', 'input-port-append',
- 'input-port?', 'inspector?', 'instanceof/c', 'integer->char',
- 'integer->integer-bytes', 'integer-bytes->integer', 'integer-in',
- 'integer-length', 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
- 'interface->method-names', 'interface-extension?', 'interface?',
- 'internal-definition-context-binding-identifiers',
- 'internal-definition-context-introduce',
- 'internal-definition-context-seal', 'internal-definition-context?',
- 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
- 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
- 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
- 'list*', 'list*of', 'list->bytes', 'list->mutable-set',
- 'list->mutable-seteq', 'list->mutable-seteqv', 'list->set',
- 'list->seteq', 'list->seteqv', 'list->string', 'list->vector',
- 'list->weak-set', 'list->weak-seteq', 'list->weak-seteqv',
- 'list-contract?', 'list-prefix?', 'list-ref', 'list-set', 'list-tail',
- 'list-update', 'list/c', 'list?', 'listen-port-number?', 'listof',
- 'load', 'load-extension', 'load-on-demand-enabled', 'load-relative',
- 'load-relative-extension', 'load/cd', 'load/use-compiled',
- 'local-expand', 'local-expand/capture-lifts',
- 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
- 'locale-string-encoding', 'log', 'log-all-levels', 'log-level-evt',
- 'log-level?', 'log-max-level', 'log-message', 'log-receiver?',
- 'logger-name', 'logger?', 'magnitude', 'make-arity-at-least',
- 'make-base-empty-namespace', 'make-base-namespace', 'make-bytes',
- 'make-channel', 'make-chaperone-contract',
- 'make-continuation-mark-key', 'make-continuation-prompt-tag',
- 'make-contract', 'make-custodian', 'make-custodian-box',
- 'make-custom-hash', 'make-custom-hash-types', 'make-custom-set',
- 'make-custom-set-types', 'make-date', 'make-date*',
- 'make-derived-parameter', 'make-directory', 'make-directory*',
- 'make-do-sequence', 'make-empty-namespace',
- 'make-environment-variables', 'make-ephemeron', 'make-exn',
- 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
- 'make-exn:fail', 'make-exn:fail:contract',
- 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
- 'make-exn:fail:contract:continuation',
- 'make-exn:fail:contract:divide-by-zero',
- 'make-exn:fail:contract:non-fixnum-result',
- 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
- 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
- 'make-exn:fail:filesystem:missing-module',
- 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
- 'make-exn:fail:network:errno', 'make-exn:fail:object',
- 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
- 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
- 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
- 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
- 'make-exn:fail:user', 'make-file-or-directory-link',
- 'make-flat-contract', 'make-fsemaphore', 'make-generic',
- 'make-handle-get-preference-locked', 'make-hash',
- 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
- 'make-hasheqv', 'make-hasheqv-placeholder',
- 'make-immutable-custom-hash', 'make-immutable-hash',
- 'make-immutable-hasheq', 'make-immutable-hasheqv',
- 'make-impersonator-property', 'make-input-port',
- 'make-input-port/read-to-peek', 'make-inspector',
- 'make-keyword-procedure', 'make-known-char-range-list',
- 'make-limited-input-port', 'make-list', 'make-lock-file-name',
- 'make-log-receiver', 'make-logger', 'make-mixin-contract',
- 'make-mutable-custom-set', 'make-none/c', 'make-object',
- 'make-output-port', 'make-parameter', 'make-parent-directory*',
- 'make-phantom-bytes', 'make-pipe', 'make-pipe-with-specials',
- 'make-placeholder', 'make-plumber', 'make-polar', 'make-prefab-struct',
- 'make-primitive-class', 'make-proj-contract',
- 'make-pseudo-random-generator', 'make-reader-graph', 'make-readtable',
- 'make-rectangular', 'make-rename-transformer',
- 'make-resolved-module-path', 'make-security-guard', 'make-semaphore',
- 'make-set!-transformer', 'make-shared-bytes', 'make-sibling-inspector',
- 'make-special-comment', 'make-srcloc', 'make-string',
- 'make-struct-field-accessor', 'make-struct-field-mutator',
- 'make-struct-type', 'make-struct-type-property',
- 'make-syntax-delta-introducer', 'make-syntax-introducer',
- 'make-temporary-file', 'make-tentative-pretty-print-output-port',
- 'make-thread-cell', 'make-thread-group', 'make-vector',
- 'make-weak-box', 'make-weak-custom-hash', 'make-weak-custom-set',
- 'make-weak-hash', 'make-weak-hasheq', 'make-weak-hasheqv',
- 'make-will-executor', 'map', 'match-equality-test',
- 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', 'mcons', 'member',
- 'member-name-key-hash-code', 'member-name-key=?', 'member-name-key?',
- 'memf', 'memq', 'memv', 'merge-input', 'method-in-interface?', 'min',
- 'mixin-contract', 'module->exports', 'module->imports',
- 'module->language-info', 'module->namespace',
- 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
- 'module-compiled-imports', 'module-compiled-language-info',
- 'module-compiled-name', 'module-compiled-submodules',
- 'module-declared?', 'module-path-index-join',
- 'module-path-index-resolve', 'module-path-index-split',
- 'module-path-index-submodule', 'module-path-index?', 'module-path?',
- 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
- 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
- 'nack-guard-evt', 'namespace-anchor->empty-namespace',
- 'namespace-anchor->namespace', 'namespace-anchor?',
- 'namespace-attach-module', 'namespace-attach-module-declaration',
- 'namespace-base-phase', 'namespace-mapped-symbols',
- 'namespace-module-identifier', 'namespace-module-registry',
- 'namespace-require', 'namespace-require/constant',
- 'namespace-require/copy', 'namespace-require/expansion-time',
- 'namespace-set-variable-value!', 'namespace-symbol->identifier',
- 'namespace-syntax-introduce', 'namespace-undefine-variable!',
- 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
- 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
- 'new-∀/c', 'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
- 'non-empty-string?', 'none/c', 'normal-case-path', 'normalize-arity',
- 'normalize-path', 'normalized-arity?', 'not', 'not/c', 'null', 'null?',
- 'number->string', 'number?', 'numerator', 'object%', 'object->vector',
- 'object-info', 'object-interface', 'object-method-arity-includes?',
- 'object-name', 'object-or-false=?', 'object=?', 'object?', 'odd?',
- 'one-of/c', 'open-input-bytes', 'open-input-file',
- 'open-input-output-file', 'open-input-string', 'open-output-bytes',
- 'open-output-file', 'open-output-nowhere', 'open-output-string',
- 'or/c', 'order-of-magnitude', 'ormap', 'other-execute-bit',
- 'other-read-bit', 'other-write-bit', 'output-port?', 'pair?',
- 'parameter-procedure=?', 'parameter/c', 'parameter?',
- 'parameterization?', 'parse-command-line', 'partition', 'path->bytes',
- 'path->complete-path', 'path->directory-path', 'path->string',
- 'path-add-suffix', 'path-convention-type', 'path-element->bytes',
- 'path-element->string', 'path-element?', 'path-for-some-system?',
- 'path-list-string->path-list', 'path-only', 'path-replace-suffix',
- 'path-string?', 'path<?', 'path?', 'pathlist-closure', 'peek-byte',
- 'peek-byte-or-special', 'peek-bytes', 'peek-bytes!', 'peek-bytes!-evt',
- 'peek-bytes-avail!', 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
- 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
- 'peek-char-or-special', 'peek-string', 'peek-string!',
- 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
- 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
- 'place-break', 'place-channel', 'place-channel-get',
- 'place-channel-put', 'place-channel-put/get', 'place-channel?',
- 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
- 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
- 'placeholder-get', 'placeholder-set!', 'placeholder?',
- 'plumber-add-flush!', 'plumber-flush-all',
- 'plumber-flush-handle-remove!', 'plumber-flush-handle?', 'plumber?',
- 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
- 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
- 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
- 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
- 'port-file-unlock', 'port-next-location', 'port-number?',
- 'port-print-handler', 'port-progress-evt',
- 'port-provides-progress-evts?', 'port-read-handler',
- 'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?',
- 'port-writes-special?', 'port?', 'positive?', 'predicate/c',
- 'prefab-key->struct-type', 'prefab-key?', 'prefab-struct-key',
- 'preferences-lock-file-mode', 'pregexp', 'pregexp?', 'pretty-display',
- 'pretty-format', 'pretty-print', 'pretty-print-.-symbol-without-bars',
- 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
- 'pretty-print-current-style-table', 'pretty-print-depth',
- 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
- 'pretty-print-handler', 'pretty-print-newline',
- 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
- 'pretty-print-print-hook', 'pretty-print-print-line',
- 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
- 'pretty-print-size-hook', 'pretty-print-style-table?',
- 'pretty-printing', 'pretty-write', 'primitive-closure?',
- 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
- 'print-boolean-long-form', 'print-box', 'print-graph',
- 'print-hash-table', 'print-mpair-curly-braces',
- 'print-pair-curly-braces', 'print-reader-abbreviations',
- 'print-struct', 'print-syntax-width', 'print-unreadable',
- 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
- 'println', 'procedure->method', 'procedure-arity',
- 'procedure-arity-includes/c', 'procedure-arity-includes?',
- 'procedure-arity?', 'procedure-closure-contents-eq?',
- 'procedure-extract-target', 'procedure-keywords',
- 'procedure-reduce-arity', 'procedure-reduce-keyword-arity',
- 'procedure-rename', 'procedure-result-arity', 'procedure-specialize',
- 'procedure-struct-type?', 'procedure?', 'process', 'process*',
- 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
- 'promise-forced?', 'promise-running?', 'promise/c', 'promise/name?',
- 'promise?', 'prop:arity-string', 'prop:arrow-contract',
- 'prop:arrow-contract-get-info', 'prop:arrow-contract?', 'prop:blame',
- 'prop:chaperone-contract', 'prop:checked-procedure', 'prop:contract',
- 'prop:contracted', 'prop:custom-print-quotable', 'prop:custom-write',
- 'prop:dict', 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
- 'prop:exn:missing-module', 'prop:exn:srclocs',
- 'prop:expansion-contexts', 'prop:flat-contract',
- 'prop:impersonator-of', 'prop:input-port',
- 'prop:liberal-define-context', 'prop:object-name',
- 'prop:opt-chaperone-contract', 'prop:opt-chaperone-contract-get-test',
- 'prop:opt-chaperone-contract?', 'prop:orc-contract',
- 'prop:orc-contract-get-subcontracts', 'prop:orc-contract?',
- 'prop:output-port', 'prop:place-location', 'prop:procedure',
- 'prop:recursive-contract', 'prop:recursive-contract-unroll',
- 'prop:recursive-contract?', 'prop:rename-transformer', 'prop:sequence',
- 'prop:set!-transformer', 'prop:stream', 'proper-subset?',
- 'pseudo-random-generator->vector', 'pseudo-random-generator-vector?',
- 'pseudo-random-generator?', 'put-preferences', 'putenv', 'quotient',
- 'quotient/remainder', 'radians->degrees', 'raise',
- 'raise-argument-error', 'raise-arguments-error', 'raise-arity-error',
- 'raise-blame-error', 'raise-contract-error', 'raise-mismatch-error',
- 'raise-not-cons-blame-error', 'raise-range-error',
- 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
- 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
- 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
- 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
- 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
- 'read-accept-reader', 'read-byte', 'read-byte-or-special',
- 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
- 'read-bytes-avail!*', 'read-bytes-avail!-evt',
- 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
- 'read-bytes-line-evt', 'read-case-sensitive', 'read-cdot', 'read-char',
- 'read-char-or-special', 'read-curly-brace-as-paren',
- 'read-curly-brace-with-tag', 'read-decimal-as-inexact',
- 'read-eval-print-loop', 'read-language', 'read-line', 'read-line-evt',
- 'read-on-demand-source', 'read-square-bracket-as-paren',
- 'read-square-bracket-with-tag', 'read-string', 'read-string!',
- 'read-string!-evt', 'read-string-evt', 'read-syntax',
- 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
- 'readtable?', 'real->decimal-string', 'real->double-flonum',
- 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
- 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
- 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
- 'regexp-match-exact?', 'regexp-match-peek',
- 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
- 'regexp-match-peek-positions*',
- 'regexp-match-peek-positions-immediate',
- 'regexp-match-peek-positions-immediate/end',
- 'regexp-match-peek-positions/end', 'regexp-match-positions',
- 'regexp-match-positions*', 'regexp-match-positions/end',
- 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
- 'regexp-quote', 'regexp-replace', 'regexp-replace*',
- 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
- 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
- 'relocate-output-port', 'remainder', 'remf', 'remf*', 'remove',
- 'remove*', 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
- 'rename-contract', 'rename-file-or-directory',
- 'rename-transformer-target', 'rename-transformer?', 'replace-evt',
- 'reroot-path', 'resolve-path', 'resolved-module-path-name',
- 'resolved-module-path?', 'rest', 'reverse', 'round', 'second',
- 'seconds->date', 'security-guard?', 'semaphore-peek-evt',
- 'semaphore-peek-evt?', 'semaphore-post', 'semaphore-try-wait?',
- 'semaphore-wait', 'semaphore-wait/enable-break', 'semaphore?',
- 'sequence->list', 'sequence->stream', 'sequence-add-between',
- 'sequence-andmap', 'sequence-append', 'sequence-count',
- 'sequence-filter', 'sequence-fold', 'sequence-for-each',
- 'sequence-generate', 'sequence-generate*', 'sequence-length',
- 'sequence-map', 'sequence-ormap', 'sequence-ref', 'sequence-tail',
- 'sequence/c', 'sequence?', 'set', 'set!-transformer-procedure',
- 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
- 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
- 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
- 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
- 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
- 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
- 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
- 'set-some-basic-contracts!', 'set-subtract', 'set-subtract!',
- 'set-symmetric-difference', 'set-symmetric-difference!', 'set-union',
- 'set-union!', 'set-weak?', 'set/c', 'set=?', 'set?', 'seteq', 'seteqv',
- 'seventh', 'sgn', 'shared-bytes', 'shell-execute', 'shrink-path-wrt',
- 'shuffle', 'simple-form-path', 'simplify-path', 'sin',
- 'single-flonum?', 'sinh', 'sixth', 'skip-projection-wrapper?', 'sleep',
- 'some-system-path->string', 'sort', 'special-comment-value',
- 'special-comment?', 'special-filter-input-port', 'split-at',
- 'split-at-right', 'split-common-prefix', 'split-path', 'splitf-at',
- 'splitf-at-right', 'sqr', 'sqrt', 'srcloc', 'srcloc->string',
- 'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source',
- 'srcloc-span', 'srcloc?', 'stop-after', 'stop-before', 'stream->list',
- 'stream-add-between', 'stream-andmap', 'stream-append', 'stream-count',
- 'stream-empty?', 'stream-filter', 'stream-first', 'stream-fold',
- 'stream-for-each', 'stream-length', 'stream-map', 'stream-ormap',
- 'stream-ref', 'stream-rest', 'stream-tail', 'stream/c', 'stream?',
- 'string', 'string->bytes/latin-1', 'string->bytes/locale',
- 'string->bytes/utf-8', 'string->immutable-string', 'string->keyword',
- 'string->list', 'string->number', 'string->path',
- 'string->path-element', 'string->some-system-path', 'string->symbol',
- 'string->uninterned-symbol', 'string->unreadable-symbol',
- 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
- 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-contains?',
- 'string-copy', 'string-copy!', 'string-downcase',
- 'string-environment-variable-name?', 'string-fill!', 'string-foldcase',
- 'string-join', 'string-len/c', 'string-length', 'string-locale-ci<?',
- 'string-locale-ci=?', 'string-locale-ci>?', 'string-locale-downcase',
- 'string-locale-upcase', 'string-locale<?', 'string-locale=?',
- 'string-locale>?', 'string-no-nuls?', 'string-normalize-nfc',
- 'string-normalize-nfd', 'string-normalize-nfkc',
- 'string-normalize-nfkd', 'string-normalize-spaces', 'string-port?',
- 'string-prefix?', 'string-ref', 'string-replace', 'string-set!',
- 'string-split', 'string-suffix?', 'string-titlecase', 'string-trim',
- 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
- 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
- 'struct-accessor-procedure?', 'struct-constructor-procedure?',
- 'struct-info', 'struct-mutator-procedure?',
- 'struct-predicate-procedure?', 'struct-type-info',
- 'struct-type-make-constructor', 'struct-type-make-predicate',
- 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
- 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
- 'struct:arrow-contract-info', 'struct:date', 'struct:date*',
- 'struct:exn', 'struct:exn:break', 'struct:exn:break:hang-up',
- 'struct:exn:break:terminate', 'struct:exn:fail',
- 'struct:exn:fail:contract', 'struct:exn:fail:contract:arity',
- 'struct:exn:fail:contract:blame',
- 'struct:exn:fail:contract:continuation',
- 'struct:exn:fail:contract:divide-by-zero',
- 'struct:exn:fail:contract:non-fixnum-result',
- 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
- 'struct:exn:fail:filesystem:errno',
- 'struct:exn:fail:filesystem:exists',
- 'struct:exn:fail:filesystem:missing-module',
- 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
- 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
- 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
- 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
- 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
- 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
- 'struct:exn:fail:user', 'struct:srcloc',
- 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
- 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
- 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
- 'subprocess-wait', 'subprocess?', 'subset?', 'substring', 'suggest/c',
- 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
- 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
- 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
- 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-debug-info',
- 'syntax-disarm', 'syntax-e', 'syntax-line',
- 'syntax-local-bind-syntaxes', 'syntax-local-certifier',
- 'syntax-local-context', 'syntax-local-expand-expression',
- 'syntax-local-get-shadower', 'syntax-local-identifier-as-binding',
- 'syntax-local-introduce', 'syntax-local-lift-context',
- 'syntax-local-lift-expression', 'syntax-local-lift-module',
- 'syntax-local-lift-module-end-declaration',
- 'syntax-local-lift-provide', 'syntax-local-lift-require',
- 'syntax-local-lift-values-expression',
- 'syntax-local-make-definition-context',
- 'syntax-local-make-delta-introducer',
- 'syntax-local-module-defined-identifiers',
- 'syntax-local-module-exports',
- 'syntax-local-module-required-identifiers', 'syntax-local-name',
- 'syntax-local-phase-level', 'syntax-local-submodules',
- 'syntax-local-transforming-module-provides?', 'syntax-local-value',
- 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
- 'syntax-property', 'syntax-property-preserved?',
- 'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm',
- 'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source',
- 'syntax-source-module', 'syntax-span', 'syntax-taint',
- 'syntax-tainted?', 'syntax-track-origin',
- 'syntax-transforming-module-expression?',
- 'syntax-transforming-with-lifts?', 'syntax-transforming?', 'syntax/c',
- 'syntax?', 'system', 'system*', 'system*/exit-code',
- 'system-big-endian?', 'system-idle-evt', 'system-language+country',
- 'system-library-subpath', 'system-path-convention-type', 'system-type',
- 'system/exit-code', 'tail-marks-match?', 'take', 'take-common-prefix',
- 'take-right', 'takef', 'takef-right', 'tan', 'tanh',
- 'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt',
- 'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses',
- 'tcp-close', 'tcp-connect', 'tcp-connect/enable-break', 'tcp-listen',
- 'tcp-listener?', 'tcp-port?', 'tentative-pretty-print-port-cancel',
- 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
- 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
- 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
- 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
- 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
- 'thread-rewind-receive', 'thread-running?', 'thread-send',
- 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
- 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
- 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
- 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
- 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
- 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
- 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
- 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
- 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
- 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
- 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
- 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
- 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
- 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
- 'unsupplied-arg?', 'use-collection-link-paths',
- 'use-compiled-file-paths', 'use-user-specific-search-paths',
- 'user-execute-bit', 'user-read-bit', 'user-write-bit', 'value-blame',
- 'value-contract', 'values', 'variable-reference->empty-namespace',
- 'variable-reference->module-base-phase',
- 'variable-reference->module-declaration-inspector',
- 'variable-reference->module-path-index',
- 'variable-reference->module-source', 'variable-reference->namespace',
- 'variable-reference->phase',
- 'variable-reference->resolved-module-path',
- 'variable-reference-constant?', 'variable-reference?', 'vector',
- 'vector->immutable-vector', 'vector->list',
- 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
- 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
- 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
- 'vector-drop-right', 'vector-fill!', 'vector-filter',
- 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
- 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
- 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
- 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
- 'vector-split-at', 'vector-split-at-right', 'vector-take',
- 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
- 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
- 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
- 'will-register', 'will-try-execute', 'with-input-from-bytes',
- 'with-input-from-file', 'with-input-from-string',
- 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
- 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
- 'wrapped-extra-arg-arrow-extra-neg-party-argument',
- 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
- 'writable<%>', 'write', 'write-byte', 'write-bytes',
- 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
- 'write-bytes-avail/enable-break', 'write-char', 'write-special',
- 'write-special-avail*', 'write-special-evt', 'write-string',
- 'write-to-file', 'writeln', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a',
- '~e', '~r', '~s', '~v'
+ '*', '*list/c', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c',
+ '>', '>/c', '>=', '>=/c', 'abort-current-continuation', 'abs',
+ 'absolute-path?', 'acos', 'add-between', 'add1', 'alarm-evt',
+ 'always-evt', 'and/c', 'andmap', 'angle', 'any/c', 'append', 'append*',
+ 'append-map', 'apply', 'argmax', 'argmin', 'arithmetic-shift',
+ 'arity-at-least', 'arity-at-least-value', 'arity-at-least?',
+ 'arity-checking-wrapper', 'arity-includes?', 'arity=?',
+ 'arrow-contract-info', 'arrow-contract-info-accepts-arglist',
+ 'arrow-contract-info-chaperone-procedure',
+ 'arrow-contract-info-check-first-order', 'arrow-contract-info?',
+ 'asin', 'assf', 'assoc', 'assq', 'assv', 'atan',
+ 'bad-number-of-results', 'banner', 'base->-doms/c', 'base->-rngs/c',
+ 'base->?', 'between/c', 'bitwise-and', 'bitwise-bit-field',
+ 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor',
+ 'blame-add-car-context', 'blame-add-cdr-context', 'blame-add-context',
+ 'blame-add-missing-party', 'blame-add-nth-arg-context',
+ 'blame-add-range-context', 'blame-add-unknown-context',
+ 'blame-context', 'blame-contract', 'blame-fmt->-string',
+ 'blame-missing-party?', 'blame-negative', 'blame-original?',
+ 'blame-positive', 'blame-replace-negative', 'blame-source',
+ 'blame-swap', 'blame-swapped?', 'blame-update', 'blame-value',
+ 'blame?', 'boolean=?', 'boolean?', 'bound-identifier=?', 'box',
+ 'box-cas!', 'box-immutable', 'box-immutable/c', 'box/c', 'box?',
+ 'break-enabled', 'break-parameterization?', 'break-thread',
+ 'build-chaperone-contract-property', 'build-compound-type-name',
+ 'build-contract-property', 'build-flat-contract-property',
+ 'build-list', 'build-path', 'build-path/convention-type',
+ 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
+ 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
+ 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
+ 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
+ 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
+ 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
+ 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
+ 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
+ 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
+ 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
+ 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
+ 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
+ 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
+ 'call-in-nested-thread', 'call-with-atomic-output-file',
+ 'call-with-break-parameterization',
+ 'call-with-composable-continuation', 'call-with-continuation-barrier',
+ 'call-with-continuation-prompt', 'call-with-current-continuation',
+ 'call-with-default-reading-parameterization',
+ 'call-with-escape-continuation', 'call-with-exception-handler',
+ 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
+ 'call-with-input-bytes', 'call-with-input-file',
+ 'call-with-input-file*', 'call-with-input-string',
+ 'call-with-output-bytes', 'call-with-output-file',
+ 'call-with-output-file*', 'call-with-output-string',
+ 'call-with-parameterization', 'call-with-semaphore',
+ 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
+ 'call/ec', 'car', 'cartesian-product', 'cdaaar', 'cdaadr', 'cdaar',
+ 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar',
+ 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get',
+ 'channel-put', 'channel-put-evt', 'channel-put-evt?',
+ 'channel-try-get', 'channel/c', 'channel?', 'chaperone-box',
+ 'chaperone-channel', 'chaperone-continuation-mark-key',
+ 'chaperone-contract-property?', 'chaperone-contract?', 'chaperone-evt',
+ 'chaperone-hash', 'chaperone-hash-set', 'chaperone-of?',
+ 'chaperone-procedure', 'chaperone-procedure*', 'chaperone-prompt-tag',
+ 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
+ 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
+ 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
+ 'char-downcase', 'char-foldcase', 'char-general-category',
+ 'char-graphic?', 'char-in', 'char-in/c', 'char-iso-control?',
+ 'char-lower-case?', 'char-numeric?', 'char-punctuation?',
+ 'char-ready?', 'char-symbolic?', 'char-title-case?', 'char-titlecase',
+ 'char-upcase', 'char-upper-case?', 'char-utf-8-length',
+ 'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?',
+ 'char?', 'check-duplicate-identifier', 'check-duplicates',
+ 'checked-procedure-check-and-extract', 'choice-evt',
+ 'class->interface', 'class-info', 'class-seal', 'class-unseal',
+ 'class?', 'cleanse-path', 'close-input-port', 'close-output-port',
+ 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
+ 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
+ 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
+ 'collection-file-path', 'collection-path', 'combinations', 'compile',
+ 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
+ 'compile-enforce-module-constants', 'compile-syntax',
+ 'compiled-expression-recompile', 'compiled-expression?',
+ 'compiled-module-expression?', 'complete-path?', 'complex?', 'compose',
+ 'compose1', 'conjoin', 'conjugate', 'cons', 'cons/c', 'cons?', 'const',
+ 'continuation-mark-key/c', 'continuation-mark-key?',
+ 'continuation-mark-set->context', 'continuation-mark-set->list',
+ 'continuation-mark-set->list*', 'continuation-mark-set-first',
+ 'continuation-mark-set?', 'continuation-marks',
+ 'continuation-prompt-available?', 'continuation-prompt-tag?',
+ 'continuation?', 'contract-continuation-mark-key',
+ 'contract-custom-write-property-proc', 'contract-exercise',
+ 'contract-first-order', 'contract-first-order-passes?',
+ 'contract-late-neg-projection', 'contract-name', 'contract-proc',
+ 'contract-projection', 'contract-property?',
+ 'contract-random-generate', 'contract-random-generate-fail',
+ 'contract-random-generate-fail?',
+ 'contract-random-generate-get-current-environment',
+ 'contract-random-generate-stash', 'contract-random-generate/choose',
+ 'contract-stronger?', 'contract-struct-exercise',
+ 'contract-struct-generate', 'contract-struct-late-neg-projection',
+ 'contract-struct-list-contract?', 'contract-val-first-projection',
+ 'contract?', 'convert-stream', 'copy-directory/files', 'copy-file',
+ 'copy-port', 'cos', 'cosh', 'count', 'current-blame-format',
+ 'current-break-parameterization', 'current-code-inspector',
+ 'current-command-line-arguments', 'current-compile',
+ 'current-compiled-file-roots', 'current-continuation-marks',
+ 'current-contract-region', 'current-custodian', 'current-directory',
+ 'current-directory-for-user', 'current-drive',
+ 'current-environment-variables', 'current-error-port', 'current-eval',
+ 'current-evt-pseudo-random-generator',
+ 'current-force-delete-permissions', 'current-future',
+ 'current-gc-milliseconds', 'current-get-interaction-input-port',
+ 'current-inexact-milliseconds', 'current-input-port',
+ 'current-inspector', 'current-library-collection-links',
+ 'current-library-collection-paths', 'current-load',
+ 'current-load-extension', 'current-load-relative-directory',
+ 'current-load/use-compiled', 'current-locale', 'current-logger',
+ 'current-memory-use', 'current-milliseconds',
+ 'current-module-declare-name', 'current-module-declare-source',
+ 'current-module-name-resolver', 'current-module-path-for-load',
+ 'current-namespace', 'current-output-port', 'current-parameterization',
+ 'current-plumber', 'current-preserved-thread-cell-values',
+ 'current-print', 'current-process-milliseconds', 'current-prompt-read',
+ 'current-pseudo-random-generator', 'current-read-interaction',
+ 'current-reader-guard', 'current-readtable', 'current-seconds',
+ 'current-security-guard', 'current-subprocess-custodian-mode',
+ 'current-thread', 'current-thread-group',
+ 'current-thread-initial-stack-size',
+ 'current-write-relative-directory', 'curry', 'curryr',
+ 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
+ 'custodian-managed-list', 'custodian-memory-accounting-available?',
+ 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
+ 'custom-print-quotable-accessor', 'custom-print-quotable?',
+ 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
+ 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
+ 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
+ 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
+ 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
+ 'default-continuation-prompt-tag', 'degrees->radians',
+ 'delete-directory', 'delete-directory/files', 'delete-file',
+ 'denominator', 'dict->list', 'dict-can-functional-set?',
+ 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
+ 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
+ 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
+ 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
+ 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
+ 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
+ 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
+ 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
+ 'dict?', 'directory-exists?', 'directory-list', 'disjoin', 'display',
+ 'display-lines', 'display-lines-to-file', 'display-to-file',
+ 'displayln', 'double-flonum?', 'drop', 'drop-common-prefix',
+ 'drop-right', 'dropf', 'dropf-right', 'dump-memory-stats',
+ 'dup-input-port', 'dup-output-port', 'dynamic->*', 'dynamic-get-field',
+ 'dynamic-object/c', 'dynamic-place', 'dynamic-place*',
+ 'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-send',
+ 'dynamic-set-field!', 'dynamic-wind', 'eighth', 'empty',
+ 'empty-sequence', 'empty-stream', 'empty?',
+ 'environment-variables-copy', 'environment-variables-names',
+ 'environment-variables-ref', 'environment-variables-set!',
+ 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
+ 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
+ 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
+ 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
+ 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
+ 'error-display-handler', 'error-escape-handler',
+ 'error-print-context-length', 'error-print-source-location',
+ 'error-print-width', 'error-value->string-handler', 'eval',
+ 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
+ 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
+ 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
+ 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
+ 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
+ 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
+ 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
+ 'exn:break?', 'exn:fail', 'exn:fail:contract',
+ 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
+ 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
+ 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
+ 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
+ 'exn:fail:contract:divide-by-zero?',
+ 'exn:fail:contract:non-fixnum-result',
+ 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
+ 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
+ 'exn:fail:contract?', 'exn:fail:filesystem',
+ 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
+ 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
+ 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
+ 'exn:fail:filesystem:missing-module-path',
+ 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
+ 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
+ 'exn:fail:network', 'exn:fail:network:errno',
+ 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
+ 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
+ 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
+ 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
+ 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
+ 'exn:fail:syntax', 'exn:fail:syntax-exprs',
+ 'exn:fail:syntax:missing-module',
+ 'exn:fail:syntax:missing-module-path',
+ 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
+ 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
+ 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
+ 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
+ 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
+ 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
+ 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
+ 'explode-path', 'expt', 'externalizable<%>', 'failure-result/c',
+ 'false?', 'field-names', 'fifth', 'file->bytes', 'file->bytes-lines',
+ 'file->lines', 'file->list', 'file->string', 'file->value',
+ 'file-exists?', 'file-name-from-path', 'file-or-directory-identity',
+ 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
+ 'file-position', 'file-position*', 'file-size',
+ 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
+ 'filename-extension', 'filesystem-change-evt',
+ 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
+ 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
+ 'filter-read-input-port', 'find-executable-path', 'find-files',
+ 'find-library-collection-links', 'find-library-collection-paths',
+ 'find-relative-path', 'find-system-path', 'findf', 'first',
+ 'first-or/c', 'fixnum?', 'flat-contract', 'flat-contract-predicate',
+ 'flat-contract-property?', 'flat-contract?', 'flat-named-contract',
+ 'flatten', 'floating-point-bytes->real', 'flonum?', 'floor',
+ 'flush-output', 'fold-files', 'foldl', 'foldr', 'for-each', 'force',
+ 'format', 'fourth', 'fprintf', 'free-identifier=?',
+ 'free-label-identifier=?', 'free-template-identifier=?',
+ 'free-transformer-identifier=?', 'fsemaphore-count', 'fsemaphore-post',
+ 'fsemaphore-try-wait?', 'fsemaphore-wait', 'fsemaphore?', 'future',
+ 'future?', 'futures-enabled?', 'gcd', 'generate-member-key',
+ 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
+ 'get-output-bytes', 'get-output-string', 'get-preference',
+ 'get/build-late-neg-projection', 'get/build-val-first-projection',
+ 'getenv', 'global-port-print-handler', 'group-by', 'group-execute-bit',
+ 'group-read-bit', 'group-write-bit', 'guard-evt', 'handle-evt',
+ 'handle-evt?', 'has-blame?', 'has-contract?', 'hash', 'hash->list',
+ 'hash-clear', 'hash-clear!', 'hash-copy', 'hash-copy-clear',
+ 'hash-count', 'hash-empty?', 'hash-eq?', 'hash-equal?', 'hash-eqv?',
+ 'hash-for-each', 'hash-has-key?', 'hash-iterate-first',
+ 'hash-iterate-key', 'hash-iterate-key+value', 'hash-iterate-next',
+ 'hash-iterate-pair', 'hash-iterate-value', 'hash-keys', 'hash-map',
+ 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
+ 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
+ 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
+ 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
+ 'identifier-binding-symbol', 'identifier-label-binding',
+ 'identifier-prune-lexical-context',
+ 'identifier-prune-to-source-module',
+ 'identifier-remove-from-definition-context',
+ 'identifier-template-binding', 'identifier-transformer-binding',
+ 'identifier?', 'identity', 'if/c', 'imag-part', 'immutable?',
+ 'impersonate-box', 'impersonate-channel',
+ 'impersonate-continuation-mark-key', 'impersonate-hash',
+ 'impersonate-hash-set', 'impersonate-procedure',
+ 'impersonate-procedure*', 'impersonate-prompt-tag',
+ 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
+ 'impersonator-ephemeron', 'impersonator-of?',
+ 'impersonator-prop:application-mark', 'impersonator-prop:blame',
+ 'impersonator-prop:contracted',
+ 'impersonator-property-accessor-procedure?', 'impersonator-property?',
+ 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
+ 'in-bytes-lines', 'in-combinations', 'in-cycle', 'in-dict',
+ 'in-dict-keys', 'in-dict-pairs', 'in-dict-values', 'in-directory',
+ 'in-hash', 'in-hash-keys', 'in-hash-pairs', 'in-hash-values',
+ 'in-immutable-hash', 'in-immutable-hash-keys',
+ 'in-immutable-hash-pairs', 'in-immutable-hash-values',
+ 'in-immutable-set', 'in-indexed', 'in-input-port-bytes',
+ 'in-input-port-chars', 'in-lines', 'in-list', 'in-mlist',
+ 'in-mutable-hash', 'in-mutable-hash-keys', 'in-mutable-hash-pairs',
+ 'in-mutable-hash-values', 'in-mutable-set', 'in-naturals',
+ 'in-parallel', 'in-permutations', 'in-port', 'in-producer', 'in-range',
+ 'in-sequences', 'in-set', 'in-slice', 'in-stream', 'in-string',
+ 'in-syntax', 'in-value', 'in-values*-sequence', 'in-values-sequence',
+ 'in-vector', 'in-weak-hash', 'in-weak-hash-keys', 'in-weak-hash-pairs',
+ 'in-weak-hash-values', 'in-weak-set', 'inexact->exact',
+ 'inexact-real?', 'inexact?', 'infinite?', 'input-port-append',
+ 'input-port?', 'inspector?', 'instanceof/c', 'integer->char',
+ 'integer->integer-bytes', 'integer-bytes->integer', 'integer-in',
+ 'integer-length', 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
+ 'interface->method-names', 'interface-extension?', 'interface?',
+ 'internal-definition-context-binding-identifiers',
+ 'internal-definition-context-introduce',
+ 'internal-definition-context-seal', 'internal-definition-context?',
+ 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
+ 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
+ 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
+ 'list*', 'list*of', 'list->bytes', 'list->mutable-set',
+ 'list->mutable-seteq', 'list->mutable-seteqv', 'list->set',
+ 'list->seteq', 'list->seteqv', 'list->string', 'list->vector',
+ 'list->weak-set', 'list->weak-seteq', 'list->weak-seteqv',
+ 'list-contract?', 'list-prefix?', 'list-ref', 'list-set', 'list-tail',
+ 'list-update', 'list/c', 'list?', 'listen-port-number?', 'listof',
+ 'load', 'load-extension', 'load-on-demand-enabled', 'load-relative',
+ 'load-relative-extension', 'load/cd', 'load/use-compiled',
+ 'local-expand', 'local-expand/capture-lifts',
+ 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
+ 'locale-string-encoding', 'log', 'log-all-levels', 'log-level-evt',
+ 'log-level?', 'log-max-level', 'log-message', 'log-receiver?',
+ 'logger-name', 'logger?', 'magnitude', 'make-arity-at-least',
+ 'make-base-empty-namespace', 'make-base-namespace', 'make-bytes',
+ 'make-channel', 'make-chaperone-contract',
+ 'make-continuation-mark-key', 'make-continuation-prompt-tag',
+ 'make-contract', 'make-custodian', 'make-custodian-box',
+ 'make-custom-hash', 'make-custom-hash-types', 'make-custom-set',
+ 'make-custom-set-types', 'make-date', 'make-date*',
+ 'make-derived-parameter', 'make-directory', 'make-directory*',
+ 'make-do-sequence', 'make-empty-namespace',
+ 'make-environment-variables', 'make-ephemeron', 'make-exn',
+ 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
+ 'make-exn:fail', 'make-exn:fail:contract',
+ 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
+ 'make-exn:fail:contract:continuation',
+ 'make-exn:fail:contract:divide-by-zero',
+ 'make-exn:fail:contract:non-fixnum-result',
+ 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
+ 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
+ 'make-exn:fail:filesystem:missing-module',
+ 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
+ 'make-exn:fail:network:errno', 'make-exn:fail:object',
+ 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
+ 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
+ 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
+ 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
+ 'make-exn:fail:user', 'make-file-or-directory-link',
+ 'make-flat-contract', 'make-fsemaphore', 'make-generic',
+ 'make-handle-get-preference-locked', 'make-hash',
+ 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
+ 'make-hasheqv', 'make-hasheqv-placeholder',
+ 'make-immutable-custom-hash', 'make-immutable-hash',
+ 'make-immutable-hasheq', 'make-immutable-hasheqv',
+ 'make-impersonator-property', 'make-input-port',
+ 'make-input-port/read-to-peek', 'make-inspector',
+ 'make-keyword-procedure', 'make-known-char-range-list',
+ 'make-limited-input-port', 'make-list', 'make-lock-file-name',
+ 'make-log-receiver', 'make-logger', 'make-mixin-contract',
+ 'make-mutable-custom-set', 'make-none/c', 'make-object',
+ 'make-output-port', 'make-parameter', 'make-parent-directory*',
+ 'make-phantom-bytes', 'make-pipe', 'make-pipe-with-specials',
+ 'make-placeholder', 'make-plumber', 'make-polar', 'make-prefab-struct',
+ 'make-primitive-class', 'make-proj-contract',
+ 'make-pseudo-random-generator', 'make-reader-graph', 'make-readtable',
+ 'make-rectangular', 'make-rename-transformer',
+ 'make-resolved-module-path', 'make-security-guard', 'make-semaphore',
+ 'make-set!-transformer', 'make-shared-bytes', 'make-sibling-inspector',
+ 'make-special-comment', 'make-srcloc', 'make-string',
+ 'make-struct-field-accessor', 'make-struct-field-mutator',
+ 'make-struct-type', 'make-struct-type-property',
+ 'make-syntax-delta-introducer', 'make-syntax-introducer',
+ 'make-temporary-file', 'make-tentative-pretty-print-output-port',
+ 'make-thread-cell', 'make-thread-group', 'make-vector',
+ 'make-weak-box', 'make-weak-custom-hash', 'make-weak-custom-set',
+ 'make-weak-hash', 'make-weak-hasheq', 'make-weak-hasheqv',
+ 'make-will-executor', 'map', 'match-equality-test',
+ 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', 'mcons', 'member',
+ 'member-name-key-hash-code', 'member-name-key=?', 'member-name-key?',
+ 'memf', 'memq', 'memv', 'merge-input', 'method-in-interface?', 'min',
+ 'mixin-contract', 'module->exports', 'module->imports',
+ 'module->language-info', 'module->namespace',
+ 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
+ 'module-compiled-imports', 'module-compiled-language-info',
+ 'module-compiled-name', 'module-compiled-submodules',
+ 'module-declared?', 'module-path-index-join',
+ 'module-path-index-resolve', 'module-path-index-split',
+ 'module-path-index-submodule', 'module-path-index?', 'module-path?',
+ 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
+ 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
+ 'nack-guard-evt', 'namespace-anchor->empty-namespace',
+ 'namespace-anchor->namespace', 'namespace-anchor?',
+ 'namespace-attach-module', 'namespace-attach-module-declaration',
+ 'namespace-base-phase', 'namespace-mapped-symbols',
+ 'namespace-module-identifier', 'namespace-module-registry',
+ 'namespace-require', 'namespace-require/constant',
+ 'namespace-require/copy', 'namespace-require/expansion-time',
+ 'namespace-set-variable-value!', 'namespace-symbol->identifier',
+ 'namespace-syntax-introduce', 'namespace-undefine-variable!',
+ 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
+ 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
+ 'new-∀/c', 'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
+ 'non-empty-string?', 'none/c', 'normal-case-path', 'normalize-arity',
+ 'normalize-path', 'normalized-arity?', 'not', 'not/c', 'null', 'null?',
+ 'number->string', 'number?', 'numerator', 'object%', 'object->vector',
+ 'object-info', 'object-interface', 'object-method-arity-includes?',
+ 'object-name', 'object-or-false=?', 'object=?', 'object?', 'odd?',
+ 'one-of/c', 'open-input-bytes', 'open-input-file',
+ 'open-input-output-file', 'open-input-string', 'open-output-bytes',
+ 'open-output-file', 'open-output-nowhere', 'open-output-string',
+ 'or/c', 'order-of-magnitude', 'ormap', 'other-execute-bit',
+ 'other-read-bit', 'other-write-bit', 'output-port?', 'pair?',
+ 'parameter-procedure=?', 'parameter/c', 'parameter?',
+ 'parameterization?', 'parse-command-line', 'partition', 'path->bytes',
+ 'path->complete-path', 'path->directory-path', 'path->string',
+ 'path-add-suffix', 'path-convention-type', 'path-element->bytes',
+ 'path-element->string', 'path-element?', 'path-for-some-system?',
+ 'path-list-string->path-list', 'path-only', 'path-replace-suffix',
+ 'path-string?', 'path<?', 'path?', 'pathlist-closure', 'peek-byte',
+ 'peek-byte-or-special', 'peek-bytes', 'peek-bytes!', 'peek-bytes!-evt',
+ 'peek-bytes-avail!', 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
+ 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
+ 'peek-char-or-special', 'peek-string', 'peek-string!',
+ 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
+ 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
+ 'place-break', 'place-channel', 'place-channel-get',
+ 'place-channel-put', 'place-channel-put/get', 'place-channel?',
+ 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
+ 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
+ 'placeholder-get', 'placeholder-set!', 'placeholder?',
+ 'plumber-add-flush!', 'plumber-flush-all',
+ 'plumber-flush-handle-remove!', 'plumber-flush-handle?', 'plumber?',
+ 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
+ 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
+ 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
+ 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
+ 'port-file-unlock', 'port-next-location', 'port-number?',
+ 'port-print-handler', 'port-progress-evt',
+ 'port-provides-progress-evts?', 'port-read-handler',
+ 'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?',
+ 'port-writes-special?', 'port?', 'positive?', 'predicate/c',
+ 'prefab-key->struct-type', 'prefab-key?', 'prefab-struct-key',
+ 'preferences-lock-file-mode', 'pregexp', 'pregexp?', 'pretty-display',
+ 'pretty-format', 'pretty-print', 'pretty-print-.-symbol-without-bars',
+ 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
+ 'pretty-print-current-style-table', 'pretty-print-depth',
+ 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
+ 'pretty-print-handler', 'pretty-print-newline',
+ 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
+ 'pretty-print-print-hook', 'pretty-print-print-line',
+ 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
+ 'pretty-print-size-hook', 'pretty-print-style-table?',
+ 'pretty-printing', 'pretty-write', 'primitive-closure?',
+ 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
+ 'print-boolean-long-form', 'print-box', 'print-graph',
+ 'print-hash-table', 'print-mpair-curly-braces',
+ 'print-pair-curly-braces', 'print-reader-abbreviations',
+ 'print-struct', 'print-syntax-width', 'print-unreadable',
+ 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
+ 'println', 'procedure->method', 'procedure-arity',
+ 'procedure-arity-includes/c', 'procedure-arity-includes?',
+ 'procedure-arity?', 'procedure-closure-contents-eq?',
+ 'procedure-extract-target', 'procedure-keywords',
+ 'procedure-reduce-arity', 'procedure-reduce-keyword-arity',
+ 'procedure-rename', 'procedure-result-arity', 'procedure-specialize',
+ 'procedure-struct-type?', 'procedure?', 'process', 'process*',
+ 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
+ 'promise-forced?', 'promise-running?', 'promise/c', 'promise/name?',
+ 'promise?', 'prop:arity-string', 'prop:arrow-contract',
+ 'prop:arrow-contract-get-info', 'prop:arrow-contract?', 'prop:blame',
+ 'prop:chaperone-contract', 'prop:checked-procedure', 'prop:contract',
+ 'prop:contracted', 'prop:custom-print-quotable', 'prop:custom-write',
+ 'prop:dict', 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
+ 'prop:exn:missing-module', 'prop:exn:srclocs',
+ 'prop:expansion-contexts', 'prop:flat-contract',
+ 'prop:impersonator-of', 'prop:input-port',
+ 'prop:liberal-define-context', 'prop:object-name',
+ 'prop:opt-chaperone-contract', 'prop:opt-chaperone-contract-get-test',
+ 'prop:opt-chaperone-contract?', 'prop:orc-contract',
+ 'prop:orc-contract-get-subcontracts', 'prop:orc-contract?',
+ 'prop:output-port', 'prop:place-location', 'prop:procedure',
+ 'prop:recursive-contract', 'prop:recursive-contract-unroll',
+ 'prop:recursive-contract?', 'prop:rename-transformer', 'prop:sequence',
+ 'prop:set!-transformer', 'prop:stream', 'proper-subset?',
+ 'pseudo-random-generator->vector', 'pseudo-random-generator-vector?',
+ 'pseudo-random-generator?', 'put-preferences', 'putenv', 'quotient',
+ 'quotient/remainder', 'radians->degrees', 'raise',
+ 'raise-argument-error', 'raise-arguments-error', 'raise-arity-error',
+ 'raise-blame-error', 'raise-contract-error', 'raise-mismatch-error',
+ 'raise-not-cons-blame-error', 'raise-range-error',
+ 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
+ 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
+ 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
+ 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
+ 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
+ 'read-accept-reader', 'read-byte', 'read-byte-or-special',
+ 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
+ 'read-bytes-avail!*', 'read-bytes-avail!-evt',
+ 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
+ 'read-bytes-line-evt', 'read-case-sensitive', 'read-cdot', 'read-char',
+ 'read-char-or-special', 'read-curly-brace-as-paren',
+ 'read-curly-brace-with-tag', 'read-decimal-as-inexact',
+ 'read-eval-print-loop', 'read-language', 'read-line', 'read-line-evt',
+ 'read-on-demand-source', 'read-square-bracket-as-paren',
+ 'read-square-bracket-with-tag', 'read-string', 'read-string!',
+ 'read-string!-evt', 'read-string-evt', 'read-syntax',
+ 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
+ 'readtable?', 'real->decimal-string', 'real->double-flonum',
+ 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
+ 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
+ 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
+ 'regexp-match-exact?', 'regexp-match-peek',
+ 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
+ 'regexp-match-peek-positions*',
+ 'regexp-match-peek-positions-immediate',
+ 'regexp-match-peek-positions-immediate/end',
+ 'regexp-match-peek-positions/end', 'regexp-match-positions',
+ 'regexp-match-positions*', 'regexp-match-positions/end',
+ 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
+ 'regexp-quote', 'regexp-replace', 'regexp-replace*',
+ 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
+ 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
+ 'relocate-output-port', 'remainder', 'remf', 'remf*', 'remove',
+ 'remove*', 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
+ 'rename-contract', 'rename-file-or-directory',
+ 'rename-transformer-target', 'rename-transformer?', 'replace-evt',
+ 'reroot-path', 'resolve-path', 'resolved-module-path-name',
+ 'resolved-module-path?', 'rest', 'reverse', 'round', 'second',
+ 'seconds->date', 'security-guard?', 'semaphore-peek-evt',
+ 'semaphore-peek-evt?', 'semaphore-post', 'semaphore-try-wait?',
+ 'semaphore-wait', 'semaphore-wait/enable-break', 'semaphore?',
+ 'sequence->list', 'sequence->stream', 'sequence-add-between',
+ 'sequence-andmap', 'sequence-append', 'sequence-count',
+ 'sequence-filter', 'sequence-fold', 'sequence-for-each',
+ 'sequence-generate', 'sequence-generate*', 'sequence-length',
+ 'sequence-map', 'sequence-ormap', 'sequence-ref', 'sequence-tail',
+ 'sequence/c', 'sequence?', 'set', 'set!-transformer-procedure',
+ 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
+ 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
+ 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
+ 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
+ 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
+ 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
+ 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
+ 'set-some-basic-contracts!', 'set-subtract', 'set-subtract!',
+ 'set-symmetric-difference', 'set-symmetric-difference!', 'set-union',
+ 'set-union!', 'set-weak?', 'set/c', 'set=?', 'set?', 'seteq', 'seteqv',
+ 'seventh', 'sgn', 'shared-bytes', 'shell-execute', 'shrink-path-wrt',
+ 'shuffle', 'simple-form-path', 'simplify-path', 'sin',
+ 'single-flonum?', 'sinh', 'sixth', 'skip-projection-wrapper?', 'sleep',
+ 'some-system-path->string', 'sort', 'special-comment-value',
+ 'special-comment?', 'special-filter-input-port', 'split-at',
+ 'split-at-right', 'split-common-prefix', 'split-path', 'splitf-at',
+ 'splitf-at-right', 'sqr', 'sqrt', 'srcloc', 'srcloc->string',
+ 'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source',
+ 'srcloc-span', 'srcloc?', 'stop-after', 'stop-before', 'stream->list',
+ 'stream-add-between', 'stream-andmap', 'stream-append', 'stream-count',
+ 'stream-empty?', 'stream-filter', 'stream-first', 'stream-fold',
+ 'stream-for-each', 'stream-length', 'stream-map', 'stream-ormap',
+ 'stream-ref', 'stream-rest', 'stream-tail', 'stream/c', 'stream?',
+ 'string', 'string->bytes/latin-1', 'string->bytes/locale',
+ 'string->bytes/utf-8', 'string->immutable-string', 'string->keyword',
+ 'string->list', 'string->number', 'string->path',
+ 'string->path-element', 'string->some-system-path', 'string->symbol',
+ 'string->uninterned-symbol', 'string->unreadable-symbol',
+ 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
+ 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-contains?',
+ 'string-copy', 'string-copy!', 'string-downcase',
+ 'string-environment-variable-name?', 'string-fill!', 'string-foldcase',
+ 'string-join', 'string-len/c', 'string-length', 'string-locale-ci<?',
+ 'string-locale-ci=?', 'string-locale-ci>?', 'string-locale-downcase',
+ 'string-locale-upcase', 'string-locale<?', 'string-locale=?',
+ 'string-locale>?', 'string-no-nuls?', 'string-normalize-nfc',
+ 'string-normalize-nfd', 'string-normalize-nfkc',
+ 'string-normalize-nfkd', 'string-normalize-spaces', 'string-port?',
+ 'string-prefix?', 'string-ref', 'string-replace', 'string-set!',
+ 'string-split', 'string-suffix?', 'string-titlecase', 'string-trim',
+ 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
+ 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
+ 'struct-accessor-procedure?', 'struct-constructor-procedure?',
+ 'struct-info', 'struct-mutator-procedure?',
+ 'struct-predicate-procedure?', 'struct-type-info',
+ 'struct-type-make-constructor', 'struct-type-make-predicate',
+ 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
+ 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
+ 'struct:arrow-contract-info', 'struct:date', 'struct:date*',
+ 'struct:exn', 'struct:exn:break', 'struct:exn:break:hang-up',
+ 'struct:exn:break:terminate', 'struct:exn:fail',
+ 'struct:exn:fail:contract', 'struct:exn:fail:contract:arity',
+ 'struct:exn:fail:contract:blame',
+ 'struct:exn:fail:contract:continuation',
+ 'struct:exn:fail:contract:divide-by-zero',
+ 'struct:exn:fail:contract:non-fixnum-result',
+ 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
+ 'struct:exn:fail:filesystem:errno',
+ 'struct:exn:fail:filesystem:exists',
+ 'struct:exn:fail:filesystem:missing-module',
+ 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
+ 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
+ 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
+ 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
+ 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
+ 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
+ 'struct:exn:fail:user', 'struct:srcloc',
+ 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
+ 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
+ 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
+ 'subprocess-wait', 'subprocess?', 'subset?', 'substring', 'suggest/c',
+ 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
+ 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
+ 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
+ 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-debug-info',
+ 'syntax-disarm', 'syntax-e', 'syntax-line',
+ 'syntax-local-bind-syntaxes', 'syntax-local-certifier',
+ 'syntax-local-context', 'syntax-local-expand-expression',
+ 'syntax-local-get-shadower', 'syntax-local-identifier-as-binding',
+ 'syntax-local-introduce', 'syntax-local-lift-context',
+ 'syntax-local-lift-expression', 'syntax-local-lift-module',
+ 'syntax-local-lift-module-end-declaration',
+ 'syntax-local-lift-provide', 'syntax-local-lift-require',
+ 'syntax-local-lift-values-expression',
+ 'syntax-local-make-definition-context',
+ 'syntax-local-make-delta-introducer',
+ 'syntax-local-module-defined-identifiers',
+ 'syntax-local-module-exports',
+ 'syntax-local-module-required-identifiers', 'syntax-local-name',
+ 'syntax-local-phase-level', 'syntax-local-submodules',
+ 'syntax-local-transforming-module-provides?', 'syntax-local-value',
+ 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
+ 'syntax-property', 'syntax-property-preserved?',
+ 'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm',
+ 'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source',
+ 'syntax-source-module', 'syntax-span', 'syntax-taint',
+ 'syntax-tainted?', 'syntax-track-origin',
+ 'syntax-transforming-module-expression?',
+ 'syntax-transforming-with-lifts?', 'syntax-transforming?', 'syntax/c',
+ 'syntax?', 'system', 'system*', 'system*/exit-code',
+ 'system-big-endian?', 'system-idle-evt', 'system-language+country',
+ 'system-library-subpath', 'system-path-convention-type', 'system-type',
+ 'system/exit-code', 'tail-marks-match?', 'take', 'take-common-prefix',
+ 'take-right', 'takef', 'takef-right', 'tan', 'tanh',
+ 'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt',
+ 'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses',
+ 'tcp-close', 'tcp-connect', 'tcp-connect/enable-break', 'tcp-listen',
+ 'tcp-listener?', 'tcp-port?', 'tentative-pretty-print-port-cancel',
+ 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
+ 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
+ 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
+ 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
+ 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
+ 'thread-rewind-receive', 'thread-running?', 'thread-send',
+ 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
+ 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
+ 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
+ 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
+ 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
+ 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
+ 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
+ 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
+ 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
+ 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
+ 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
+ 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
+ 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
+ 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
+ 'unsupplied-arg?', 'use-collection-link-paths',
+ 'use-compiled-file-paths', 'use-user-specific-search-paths',
+ 'user-execute-bit', 'user-read-bit', 'user-write-bit', 'value-blame',
+ 'value-contract', 'values', 'variable-reference->empty-namespace',
+ 'variable-reference->module-base-phase',
+ 'variable-reference->module-declaration-inspector',
+ 'variable-reference->module-path-index',
+ 'variable-reference->module-source', 'variable-reference->namespace',
+ 'variable-reference->phase',
+ 'variable-reference->resolved-module-path',
+ 'variable-reference-constant?', 'variable-reference?', 'vector',
+ 'vector->immutable-vector', 'vector->list',
+ 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
+ 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
+ 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
+ 'vector-drop-right', 'vector-fill!', 'vector-filter',
+ 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
+ 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
+ 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
+ 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
+ 'vector-split-at', 'vector-split-at-right', 'vector-take',
+ 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
+ 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
+ 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
+ 'will-register', 'will-try-execute', 'with-input-from-bytes',
+ 'with-input-from-file', 'with-input-from-string',
+ 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
+ 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
+ 'wrapped-extra-arg-arrow-extra-neg-party-argument',
+ 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
+ 'writable<%>', 'write', 'write-byte', 'write-bytes',
+ 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
+ 'write-bytes-avail/enable-break', 'write-char', 'write-special',
+ 'write-special-avail*', 'write-special-evt', 'write-string',
+ 'write-to-file', 'writeln', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a',
+ '~e', '~r', '~s', '~v'
)
_opening_parenthesis = r'[([{]'
@@ -1295,7 +1295,7 @@ class RacketLexer(RegexLexer):
],
'datum': [
(r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
- (r';[^\n\r\x85\u2028\u2029]*', Comment.Single),
+ (r';[^\n\r\x85\u2028\u2029]*', Comment.Single),
(r'#\|', Comment.Multiline, 'block-comment'),
# Whitespaces
@@ -1515,7 +1515,7 @@ class NewLispLexer(RegexLexer):
(r'\s+', Text),
# strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# braces
(r'\{', String, "bracestring"),
@@ -1562,7 +1562,7 @@ class EmacsLispLexer(RegexLexer):
.. versionadded:: 2.1
"""
name = 'EmacsLisp'
- aliases = ['emacs-lisp', 'elisp', 'emacs']
+ aliases = ['emacs-lisp', 'elisp', 'emacs']
filenames = ['*.el']
mimetypes = ['text/x-elisp', 'application/x-elisp']
@@ -1579,7 +1579,7 @@ class EmacsLispLexer(RegexLexer):
# Take a deep breath...
symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
- macros = {
+ macros = {
'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
@@ -1626,17 +1626,17 @@ class EmacsLispLexer(RegexLexer):
'with-tramp-file-property', 'with-tramp-progress-reporter',
'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
'return-from',
- }
+ }
- special_forms = {
+ special_forms = {
'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
'save-restriction', 'setq', 'setq-default', 'subr-arity',
'unwind-protect', 'while',
- }
+ }
- builtin_function = {
+ builtin_function = {
'%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
'Snarf-documentation', 'abort-recursive-edit', 'abs',
'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
@@ -1962,9 +1962,9 @@ class EmacsLispLexer(RegexLexer):
'split-window-internal', 'sqrt', 'standard-case-table',
'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
- 'string=', 'string<', 'string>', 'string-as-multibyte',
- 'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
- 'string-collate-lessp', 'string-equal', 'string-greaterp',
+ 'string=', 'string<', 'string>', 'string-as-multibyte',
+ 'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
+ 'string-collate-lessp', 'string-equal', 'string-greaterp',
'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
'string-match', 'string-to-char', 'string-to-multibyte',
'string-to-number', 'string-to-syntax', 'string-to-unibyte',
@@ -2076,23 +2076,23 @@ class EmacsLispLexer(RegexLexer):
'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
'forward-point',
- }
+ }
- builtin_function_highlighted = {
+ builtin_function_highlighted = {
'defvaralias', 'provide', 'require',
'with-no-warnings', 'define-widget', 'with-electric-help',
'throw', 'defalias', 'featurep'
- }
+ }
- lambda_list_keywords = {
+ lambda_list_keywords = {
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
- }
+ }
- error_keywords = {
+ error_keywords = {
'cl-assert', 'cl-check-type', 'error', 'signal',
'user-error', 'warn',
- }
+ }
def get_tokens_unprocessed(self, text):
stack = ['root']
@@ -2251,7 +2251,7 @@ class ShenLexer(RegexLexer):
BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '<e>', '<!>')
- MAPPINGS = {s: Keyword for s in DECLARATIONS}
+ MAPPINGS = {s: Keyword for s in DECLARATIONS}
MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
@@ -2302,7 +2302,7 @@ class ShenLexer(RegexLexer):
if self._relevant(token):
if opening_paren and token == Keyword and value in self.DECLARATIONS:
declaration = value
- yield from self._process_declaration(declaration, tokens)
+ yield from self._process_declaration(declaration, tokens)
opening_paren = value == '(' and token == Punctuation
def _process_symbols(self, tokens):
@@ -2408,7 +2408,7 @@ class CPSALexer(RegexLexer):
# (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
# strings, symbols and characters
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'" + valid_name, String.Symbol),
(r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
@@ -2621,7 +2621,7 @@ class XtlangLexer(RegexLexer):
(r'(#b|#o|#x)[\d.]+', Number),
# strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# true/false constants
(r'(#t|#f)', Name.Constant),
@@ -2657,35 +2657,35 @@ class FennelLexer(RegexLexer):
aliases = ['fennel', 'fnl']
filenames = ['*.fnl']
- # this list is current as of Fennel version 0.10.0.
+ # this list is current as of Fennel version 0.10.0.
special_forms = (
- '#', '%', '*', '+', '-', '->', '->>', '-?>', '-?>>', '.', '..',
- '/', '//', ':', '<', '<=', '=', '>', '>=', '?.', '^', 'accumulate',
- 'and', 'band', 'bnot', 'bor', 'bxor', 'collect', 'comment', 'do', 'doc',
- 'doto', 'each', 'eval-compiler', 'for', 'hashfn', 'icollect', 'if',
- 'import-macros', 'include', 'length', 'let', 'lshift', 'lua',
- 'macrodebug', 'match', 'not', 'not=', 'or', 'partial', 'pick-args',
- 'pick-values', 'quote', 'require-macros', 'rshift', 'set',
- 'set-forcibly!', 'tset', 'values', 'when', 'while', 'with-open', '~='
- )
-
- declarations = (
- 'fn', 'global', 'lambda', 'local', 'macro', 'macros', 'var', 'λ'
+ '#', '%', '*', '+', '-', '->', '->>', '-?>', '-?>>', '.', '..',
+ '/', '//', ':', '<', '<=', '=', '>', '>=', '?.', '^', 'accumulate',
+ 'and', 'band', 'bnot', 'bor', 'bxor', 'collect', 'comment', 'do', 'doc',
+ 'doto', 'each', 'eval-compiler', 'for', 'hashfn', 'icollect', 'if',
+ 'import-macros', 'include', 'length', 'let', 'lshift', 'lua',
+ 'macrodebug', 'match', 'not', 'not=', 'or', 'partial', 'pick-args',
+ 'pick-values', 'quote', 'require-macros', 'rshift', 'set',
+ 'set-forcibly!', 'tset', 'values', 'when', 'while', 'with-open', '~='
)
+ declarations = (
+ 'fn', 'global', 'lambda', 'local', 'macro', 'macros', 'var', 'λ'
+ )
+
builtins = (
- '_G', '_VERSION', 'arg', 'assert', 'bit32', 'collectgarbage',
- 'coroutine', 'debug', 'dofile', 'error', 'getfenv',
- 'getmetatable', 'io', 'ipairs', 'load', 'loadfile', 'loadstring',
- 'math', 'next', 'os', 'package', 'pairs', 'pcall', 'print',
- 'rawequal', 'rawget', 'rawlen', 'rawset', 'require', 'select',
- 'setfenv', 'setmetatable', 'string', 'table', 'tonumber',
- 'tostring', 'type', 'unpack', 'xpcall'
+ '_G', '_VERSION', 'arg', 'assert', 'bit32', 'collectgarbage',
+ 'coroutine', 'debug', 'dofile', 'error', 'getfenv',
+ 'getmetatable', 'io', 'ipairs', 'load', 'loadfile', 'loadstring',
+ 'math', 'next', 'os', 'package', 'pairs', 'pcall', 'print',
+ 'rawequal', 'rawget', 'rawlen', 'rawset', 'require', 'select',
+ 'setfenv', 'setmetatable', 'string', 'table', 'tonumber',
+ 'tostring', 'type', 'unpack', 'xpcall'
)
- # based on the scheme definition, but disallowing leading digits and
- # commas, and @ is not allowed.
- valid_name = r'[a-zA-Z_!$%&*+/:<=>?^~|-][\w!$%&*+/:<=>?^~|\.-]*'
+ # based on the scheme definition, but disallowing leading digits and
+ # commas, and @ is not allowed.
+ valid_name = r'[a-zA-Z_!$%&*+/:<=>?^~|-][\w!$%&*+/:<=>?^~|\.-]*'
tokens = {
'root': [
@@ -2696,10 +2696,10 @@ class FennelLexer(RegexLexer):
(r'-?\d+\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-
- (r'(true|false|nil)', Name.Constant),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'(true|false|nil)', Name.Constant),
+
# these are technically strings, but it's worth visually
# distinguishing them because their intent is different
# from regular strings.
@@ -2707,8 +2707,8 @@ class FennelLexer(RegexLexer):
# special forms are keywords
(words(special_forms, suffix=' '), Keyword),
- # these are ... even more special!
- (words(declarations, suffix=' '), Keyword.Declaration),
+ # these are ... even more special!
+ (words(declarations, suffix=' '), Keyword.Declaration),
# lua standard library are builtins
(words(builtins, suffix=' '), Name.Builtin),
# special-case the vararg symbol
@@ -2720,8 +2720,8 @@ class FennelLexer(RegexLexer):
(r'(\(|\))', Punctuation),
(r'(\[|\])', Punctuation),
(r'(\{|\})', Punctuation),
-
- # the # symbol is shorthand for a lambda function
- (r'#', Punctuation),
+
+ # the # symbol is shorthand for a lambda function
+ (r'#', Punctuation),
]
}
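
A minimal usage sketch (illustration only, not part of the diff) for the FennelLexer tables re-attributed in the hunks above; it assumes nothing beyond the 'fennel' alias shown in the hunk, and the sample snippet is invented:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # Made-up Fennel snippet; 'fn' is in `declarations`, 'print' in `builtins`,
    # and '+' in `special_forms` per the tables re-attributed above.
    code = '(fn add [a b] (+ a b))\n(print (add 1 2))\n'
    lexer = get_lexer_by_name('fennel')

    # Inspect the raw token stream ('fn' should come out as Keyword.Declaration,
    # 'print' as Name.Builtin), or render it with any formatter.
    for token_type, value in lexer.get_tokens(code):
        print(token_type, repr(value))

    print(highlight(code, lexer, TerminalFormatter()))
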
diff --git a/contrib/python/Pygments/py3/pygments/lexers/make.py b/contrib/python/Pygments/py3/pygments/lexers/make.py
index 3e317e819e..30cdbe25d5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/make.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/make.py
@@ -4,7 +4,7 @@
Lexers for Makefiles and similar.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -56,7 +56,7 @@ class MakefileLexer(Lexer):
ins.append((len(done), [(0, Comment, line)]))
else:
done += line
- yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
+ yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
def analyse_text(text):
# Many makefiles have $(BIG_CAPS) style variables
@@ -91,8 +91,8 @@ class BaseMakefileLexer(RegexLexer):
(r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
# strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
# targets
(r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
'block-header'),
@@ -194,12 +194,12 @@ class CMakeLexer(RegexLexer):
}
def analyse_text(text):
- exp = (
- r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
- r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
- r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
- r'(#[^\n]*)?$'
- )
+ exp = (
+ r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
+ r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
+ r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
+ r'(#[^\n]*)?$'
+ )
if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
return 0.8
return 0.0
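
For context, a small sketch (not part of the diff) of how the analyse_text heuristic re-attributed above is consumed; the CMake snippet is invented, and real input may of course score other lexers differently:

    from pygments.lexers import guess_lexer
    from pygments.lexers.make import CMakeLexer

    text = 'cmake_minimum_required(VERSION 3.16)\nproject(demo)\n'

    # The hunk above keeps the match case-insensitive (re.IGNORECASE), so this
    # should report 0.8; guess_lexer then ranks CMakeLexer against the scores
    # returned by every other registered lexer.
    print(CMakeLexer.analyse_text(text))
    print(guess_lexer(text).name)
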
diff --git a/contrib/python/Pygments/py3/pygments/lexers/markup.py b/contrib/python/Pygments/py3/pygments/lexers/markup.py
index e1a8429ef0..4bf1427446 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/markup.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/markup.py
@@ -4,7 +4,7 @@
Lexers for non-HTML markup languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,7 +23,7 @@ from pygments.util import get_bool_opt, ClassNotFound
__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
'MozPreprocHashLexer', 'MozPreprocPercentLexer',
'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
- 'MozPreprocCssLexer', 'MarkdownLexer', 'TiddlyWiki5Lexer']
+ 'MozPreprocCssLexer', 'MarkdownLexer', 'TiddlyWiki5Lexer']
class BBCodeLexer(RegexLexer):
@@ -121,7 +121,7 @@ class RstLexer(RegexLexer):
.. versionadded:: 0.8
"""
name = 'reStructuredText'
- aliases = ['restructuredtext', 'rst', 'rest']
+ aliases = ['restructuredtext', 'rst', 'rest']
filenames = ['*.rst', '*.rest']
mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
flags = re.MULTILINE
@@ -164,11 +164,11 @@ class RstLexer(RegexLexer):
code += line[indention_size:]
else:
code += line
- yield from do_insertions(ins, lexer.get_tokens_unprocessed(code))
+ yield from do_insertions(ins, lexer.get_tokens_unprocessed(code))
# from docutils.parsers.rst.states
- closers = '\'")]}>\u2019\u201d\xbb!?'
- unicode_delimiters = '\u2010\u2011\u2012\u2013\u2014\u00a0'
+ closers = '\'")]}>\u2019\u201d\xbb!?'
+ unicode_delimiters = '\u2010\u2011\u2012\u2013\u2014\u00a0'
end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
% (re.escape(unicode_delimiters),
re.escape(closers)))
@@ -202,7 +202,7 @@ class RstLexer(RegexLexer):
bygroups(Text, Operator, using(this, state='inline'))),
# Sourcecode directives
(r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
- r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*)?\n)+)',
+ r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*)?\n)+)',
_handle_sourcecode),
# A directive
(r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
@@ -220,14 +220,14 @@ class RstLexer(RegexLexer):
Punctuation, Text, using(this, state='inline'))),
# Comments
(r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
- # Field list marker
- (r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
- bygroups(Text, Name.Class, Text)),
+ # Field list marker
+ (r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
+ bygroups(Text, Name.Class, Text)),
# Definition list
(r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
bygroups(using(this, state='inline'), using(this, state='inline'))),
# Code blocks
- (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*)?\n)+)',
+ (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*)?\n)+)',
bygroups(String.Escape, Text, String, String, Text, String)),
include('inline'),
],
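
A short sketch (not part of the diff) exercising the reStructuredText code-block handling re-attributed above, where the directive body is handed to a sub-lexer through do_insertions (_handle_sourcecode); the 'rst' alias is taken from the hunk and the snippet is invented:

    from pygments.lexers import get_lexer_by_name

    rst = (
        'Example\n'
        '=======\n'
        '\n'
        '.. code-block:: python\n'
        '\n'
        '    print("hi")\n'
        '\n'
    )

    # The body of the directive should appear as Python tokens rather than a
    # single String run when the sourcecode rule above matches.
    for token_type, value in get_lexer_by_name('rst').get_tokens(rst):
        print(token_type, repr(value))
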
@@ -460,7 +460,7 @@ class MozPreprocXulLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super().__init__(XmlLexer, MozPreprocHashLexer, **options)
+ super().__init__(XmlLexer, MozPreprocHashLexer, **options)
class MozPreprocJavascriptLexer(DelegatingLexer):
@@ -476,7 +476,7 @@ class MozPreprocJavascriptLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super().__init__(JavascriptLexer, MozPreprocHashLexer, **options)
+ super().__init__(JavascriptLexer, MozPreprocHashLexer, **options)
class MozPreprocCssLexer(DelegatingLexer):
@@ -492,7 +492,7 @@ class MozPreprocCssLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super().__init__(CssLexer, MozPreprocPercentLexer, **options)
+ super().__init__(CssLexer, MozPreprocPercentLexer, **options)
class MarkdownLexer(RegexLexer):
@@ -501,9 +501,9 @@ class MarkdownLexer(RegexLexer):
.. versionadded:: 2.2
"""
- name = 'Markdown'
- aliases = ['markdown', 'md']
- filenames = ['*.md', '*.markdown']
+ name = 'Markdown'
+ aliases = ['markdown', 'md']
+ filenames = ['*.md', '*.markdown']
mimetypes = ["text/x-markdown"]
flags = re.MULTILINE
@@ -514,9 +514,9 @@ class MarkdownLexer(RegexLexer):
from pygments.lexers import get_lexer_by_name
# section header
- yield match.start(1), String.Backtick, match.group(1)
- yield match.start(2), String.Backtick, match.group(2)
- yield match.start(3), Text , match.group(3)
+ yield match.start(1), String.Backtick, match.group(1)
+ yield match.start(2), String.Backtick, match.group(2)
+ yield match.start(3), Text , match.group(3)
# lookup lexer if wanted and existing
lexer = None
@@ -531,67 +531,67 @@ class MarkdownLexer(RegexLexer):
if lexer is None:
yield match.start(4), String, code
else:
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
+ yield from do_insertions([], lexer.get_tokens_unprocessed(code))
- yield match.start(5), String.Backtick, match.group(5)
+ yield match.start(5), String.Backtick, match.group(5)
tokens = {
'root': [
- # heading with '#' prefix (atx-style)
- (r'(^#[^#].+)(\n)', bygroups(Generic.Heading, Text)),
- # subheading with '#' prefix (atx-style)
- (r'(^#{2,6}[^#].+)(\n)', bygroups(Generic.Subheading, Text)),
- # heading with '=' underlines (Setext-style)
- (r'^(.+)(\n)(=+)(\n)', bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # subheading with '-' underlines (Setext-style)
- (r'^(.+)(\n)(-+)(\n)', bygroups(Generic.Subheading, Text, Generic.Subheading, Text)),
+ # heading with '#' prefix (atx-style)
+ (r'(^#[^#].+)(\n)', bygroups(Generic.Heading, Text)),
+ # subheading with '#' prefix (atx-style)
+ (r'(^#{2,6}[^#].+)(\n)', bygroups(Generic.Subheading, Text)),
+ # heading with '=' underlines (Setext-style)
+ (r'^(.+)(\n)(=+)(\n)', bygroups(Generic.Heading, Text, Generic.Heading, Text)),
+ # subheading with '-' underlines (Setext-style)
+ (r'^(.+)(\n)(-+)(\n)', bygroups(Generic.Subheading, Text, Generic.Subheading, Text)),
# task list
(r'^(\s*)([*-] )(\[[ xX]\])( .+\n)',
bygroups(Text, Keyword, Keyword, using(this, state='inline'))),
- # bulleted list
+ # bulleted list
(r'^(\s*)([*-])(\s)(.+\n)',
bygroups(Text, Keyword, Text, using(this, state='inline'))),
- # numbered list
+ # numbered list
(r'^(\s*)([0-9]+\.)( .+\n)',
bygroups(Text, Keyword, using(this, state='inline'))),
# quote
(r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
- # code block fenced by 3 backticks
- (r'^(\s*```\n[\w\W]*?^\s*```$\n)', String.Backtick),
+ # code block fenced by 3 backticks
+ (r'^(\s*```\n[\w\W]*?^\s*```$\n)', String.Backtick),
# code block with language
- (r'^(\s*```)(\w+)(\n)([\w\W]*?)(^\s*```$\n)', _handle_codeblock),
+ (r'^(\s*```)(\w+)(\n)([\w\W]*?)(^\s*```$\n)', _handle_codeblock),
include('inline'),
],
'inline': [
# escape
(r'\\.', Text),
- # inline code
- (r'([^`]?)(`[^`\n]+`)', bygroups(Text, String.Backtick)),
- # warning: the following rules eat outer tags.
- # eg. **foo _bar_ baz** => foo and baz are not recognized as bold
- # bold fenced by '**'
- (r'([^\*]?)(\*\*[^* \n][^*\n]*\*\*)', bygroups(Text, Generic.Strong)),
- # bold fenced by '__'
- (r'([^_]?)(__[^_ \n][^_\n]*__)', bygroups(Text, Generic.Strong)),
- # italics fenced by '*'
- (r'([^\*]?)(\*[^* \n][^*\n]*\*)', bygroups(Text, Generic.Emph)),
- # italics fenced by '_'
- (r'([^_]?)(_[^_ \n][^_\n]*_)', bygroups(Text, Generic.Emph)),
+ # inline code
+ (r'([^`]?)(`[^`\n]+`)', bygroups(Text, String.Backtick)),
+ # warning: the following rules eat outer tags.
+ # eg. **foo _bar_ baz** => foo and baz are not recognized as bold
+ # bold fenced by '**'
+ (r'([^\*]?)(\*\*[^* \n][^*\n]*\*\*)', bygroups(Text, Generic.Strong)),
+ # bold fenced by '__'
+ (r'([^_]?)(__[^_ \n][^_\n]*__)', bygroups(Text, Generic.Strong)),
+ # italics fenced by '*'
+ (r'([^\*]?)(\*[^* \n][^*\n]*\*)', bygroups(Text, Generic.Emph)),
+ # italics fenced by '_'
+ (r'([^_]?)(_[^_ \n][^_\n]*_)', bygroups(Text, Generic.Emph)),
# strikethrough
- (r'([^~]?)(~~[^~ \n][^~\n]*~~)', bygroups(Text, Generic.Deleted)),
+ (r'([^~]?)(~~[^~ \n][^~\n]*~~)', bygroups(Text, Generic.Deleted)),
# mentions and topics (twitter and github stuff)
(r'[@#][\w/:]+', Name.Entity),
# (image?) links eg: ![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
- (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))',
- bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
+ (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))',
+ bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
# reference-style links, e.g.:
# [an example][id]
# [id]: http://example.com/
- (r'(\[)([^]]+)(\])(\[)([^]]*)(\])',
- bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)),
- (r'^(\s*\[)([^]]*)(\]:\s*)(.+)',
- bygroups(Text, Name.Label, Text, Name.Attribute)),
+ (r'(\[)([^]]+)(\])(\[)([^]]*)(\])',
+ bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)),
+ (r'^(\s*\[)([^]]*)(\]:\s*)(.+)',
+ bygroups(Text, Name.Label, Text, Name.Attribute)),
# general text, must come last!
(r'[^\\\s]+', Text),
@@ -602,161 +602,161 @@ class MarkdownLexer(RegexLexer):
def __init__(self, **options):
self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
RegexLexer.__init__(self, **options)
-
-
-class TiddlyWiki5Lexer(RegexLexer):
- """
- For `TiddlyWiki5 <https://tiddlywiki.com/#TiddlerFiles>`_ markup.
-
- .. versionadded:: 2.7
- """
- name = 'tiddler'
- aliases = ['tid']
- filenames = ['*.tid']
- mimetypes = ["text/vnd.tiddlywiki"]
- flags = re.MULTILINE
-
- def _handle_codeblock(self, match):
- """
- match args: 1:backticks, 2:lang_name, 3:newline, 4:code, 5:backticks
- """
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), String, match.group(1)
- yield match.start(2), String, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(2).strip())
- except ClassNotFound:
- pass
- code = match.group(4)
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(4), String, code
- return
-
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
-
- yield match.start(5), String, match.group(5)
-
- def _handle_cssblock(self, match):
- """
- match args: 1:style tag 2:newline, 3:code, 4:closing style tag
- """
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), String, match.group(1)
- yield match.start(2), String, match.group(2)
-
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name('css')
- except ClassNotFound:
- pass
- code = match.group(3)
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(3), String, code
- return
-
- yield from do_insertions([], lexer.get_tokens_unprocessed(code))
-
- yield match.start(4), String, match.group(4)
-
- tokens = {
- 'root': [
- # title in metadata section
- (r'^(title)(:\s)(.+\n)', bygroups(Keyword, Text, Generic.Heading)),
- # headings
- (r'^(!)([^!].+\n)', bygroups(Generic.Heading, Text)),
- (r'^(!{2,6})(.+\n)', bygroups(Generic.Subheading, Text)),
- # bulleted or numbered lists or single-line block quotes
- # (can be mixed)
- (r'^(\s*)([*#>]+)(\s*)(.+\n)',
- bygroups(Text, Keyword, Text, using(this, state='inline'))),
- # multi-line block quotes
- (r'^(<<<.*\n)([\w\W]*?)(^<<<.*$)', bygroups(String, Text, String)),
- # table header
- (r'^(\|.*?\|h)$', bygroups(Generic.Strong)),
- # table footer or caption
- (r'^(\|.*?\|[cf])$', bygroups(Generic.Emph)),
- # table class
- (r'^(\|.*?\|k)$', bygroups(Name.Tag)),
- # definitions
- (r'^(;.*)$', bygroups(Generic.Strong)),
- # text block
- (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
- # code block with language
- (r'^(```)(\w+)(\n)([\w\W]*?)(^```$)', _handle_codeblock),
- # CSS style block
- (r'^(<style>)(\n)([\w\W]*?)(^</style>$)', _handle_cssblock),
-
- include('keywords'),
- include('inline'),
- ],
- 'keywords': [
- (words((
- '\\define', '\\end', 'caption', 'created', 'modified', 'tags',
- 'title', 'type'), prefix=r'^', suffix=r'\b'),
- Keyword),
- ],
- 'inline': [
- # escape
- (r'\\.', Text),
- # created or modified date
- (r'\d{17}', Number.Integer),
- # italics
- (r'(\s)(//[^/]+//)((?=\W|\n))',
- bygroups(Text, Generic.Emph, Text)),
- # superscript
- (r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)),
- # subscript
- (r'(\s)(,,[^,]+,,)', bygroups(Text, Generic.Emph)),
- # underscore
- (r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)),
- # bold
- (r"(\s)(''[^']+'')((?=\W|\n))",
- bygroups(Text, Generic.Strong, Text)),
- # strikethrough
- (r'(\s)(~~[^~]+~~)((?=\W|\n))',
- bygroups(Text, Generic.Deleted, Text)),
- # TiddlyWiki variables
- (r'<<[^>]+>>', Name.Tag),
- (r'\$\$[^$]+\$\$', Name.Tag),
- (r'\$\([^)]+\)\$', Name.Tag),
- # TiddlyWiki style or class
- (r'^@@.*$', Name.Tag),
- # HTML tags
- (r'</?[^>]+>', Name.Tag),
- # inline code
- (r'`[^`]+`', String.Backtick),
- # HTML escaped symbols
- (r'&\S*?;', String.Regex),
- # Wiki links
- (r'(\[{2})([^]\|]+)(\]{2})', bygroups(Text, Name.Tag, Text)),
- # External links
- (r'(\[{2})([^]\|]+)(\|)([^]\|]+)(\]{2})',
- bygroups(Text, Name.Tag, Text, Name.Attribute, Text)),
- # Transclusion
- (r'(\{{2})([^}]+)(\}{2})', bygroups(Text, Name.Tag, Text)),
- # URLs
- (r'(\b.?.?tps?://[^\s"]+)', bygroups(Name.Attribute)),
-
- # general text, must come last!
- (r'[\w]+', Text),
- (r'.', Text)
- ],
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
+
+
+class TiddlyWiki5Lexer(RegexLexer):
+ """
+ For `TiddlyWiki5 <https://tiddlywiki.com/#TiddlerFiles>`_ markup.
+
+ .. versionadded:: 2.7
+ """
+ name = 'tiddler'
+ aliases = ['tid']
+ filenames = ['*.tid']
+ mimetypes = ["text/vnd.tiddlywiki"]
+ flags = re.MULTILINE
+
+ def _handle_codeblock(self, match):
+ """
+ match args: 1:backticks, 2:lang_name, 3:newline, 4:code, 5:backticks
+ """
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), String, match.group(1)
+ yield match.start(2), String, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
+ # lookup lexer if wanted and existing
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name(match.group(2).strip())
+ except ClassNotFound:
+ pass
+ code = match.group(4)
+
+ # no lexer for this language. handle it like it was a code block
+ if lexer is None:
+ yield match.start(4), String, code
+ return
+
+ yield from do_insertions([], lexer.get_tokens_unprocessed(code))
+
+ yield match.start(5), String, match.group(5)
+
+ def _handle_cssblock(self, match):
+ """
+ match args: 1:style tag 2:newline, 3:code, 4:closing style tag
+ """
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), String, match.group(1)
+ yield match.start(2), String, match.group(2)
+
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name('css')
+ except ClassNotFound:
+ pass
+ code = match.group(3)
+
+ # no lexer for this language. handle it like it was a code block
+ if lexer is None:
+ yield match.start(3), String, code
+ return
+
+ yield from do_insertions([], lexer.get_tokens_unprocessed(code))
+
+ yield match.start(4), String, match.group(4)
+
+ tokens = {
+ 'root': [
+ # title in metadata section
+ (r'^(title)(:\s)(.+\n)', bygroups(Keyword, Text, Generic.Heading)),
+ # headings
+ (r'^(!)([^!].+\n)', bygroups(Generic.Heading, Text)),
+ (r'^(!{2,6})(.+\n)', bygroups(Generic.Subheading, Text)),
+ # bulleted or numbered lists or single-line block quotes
+ # (can be mixed)
+ (r'^(\s*)([*#>]+)(\s*)(.+\n)',
+ bygroups(Text, Keyword, Text, using(this, state='inline'))),
+ # multi-line block quotes
+ (r'^(<<<.*\n)([\w\W]*?)(^<<<.*$)', bygroups(String, Text, String)),
+ # table header
+ (r'^(\|.*?\|h)$', bygroups(Generic.Strong)),
+ # table footer or caption
+ (r'^(\|.*?\|[cf])$', bygroups(Generic.Emph)),
+ # table class
+ (r'^(\|.*?\|k)$', bygroups(Name.Tag)),
+ # definitions
+ (r'^(;.*)$', bygroups(Generic.Strong)),
+ # text block
+ (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
+ # code block with language
+ (r'^(```)(\w+)(\n)([\w\W]*?)(^```$)', _handle_codeblock),
+ # CSS style block
+ (r'^(<style>)(\n)([\w\W]*?)(^</style>$)', _handle_cssblock),
+
+ include('keywords'),
+ include('inline'),
+ ],
+ 'keywords': [
+ (words((
+ '\\define', '\\end', 'caption', 'created', 'modified', 'tags',
+ 'title', 'type'), prefix=r'^', suffix=r'\b'),
+ Keyword),
+ ],
+ 'inline': [
+ # escape
+ (r'\\.', Text),
+ # created or modified date
+ (r'\d{17}', Number.Integer),
+ # italics
+ (r'(\s)(//[^/]+//)((?=\W|\n))',
+ bygroups(Text, Generic.Emph, Text)),
+ # superscript
+ (r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)),
+ # subscript
+ (r'(\s)(,,[^,]+,,)', bygroups(Text, Generic.Emph)),
+ # underscore
+ (r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)),
+ # bold
+ (r"(\s)(''[^']+'')((?=\W|\n))",
+ bygroups(Text, Generic.Strong, Text)),
+ # strikethrough
+ (r'(\s)(~~[^~]+~~)((?=\W|\n))',
+ bygroups(Text, Generic.Deleted, Text)),
+ # TiddlyWiki variables
+ (r'<<[^>]+>>', Name.Tag),
+ (r'\$\$[^$]+\$\$', Name.Tag),
+ (r'\$\([^)]+\)\$', Name.Tag),
+ # TiddlyWiki style or class
+ (r'^@@.*$', Name.Tag),
+ # HTML tags
+ (r'</?[^>]+>', Name.Tag),
+ # inline code
+ (r'`[^`]+`', String.Backtick),
+ # HTML escaped symbols
+ (r'&\S*?;', String.Regex),
+ # Wiki links
+ (r'(\[{2})([^]\|]+)(\]{2})', bygroups(Text, Name.Tag, Text)),
+ # External links
+ (r'(\[{2})([^]\|]+)(\|)([^]\|]+)(\]{2})',
+ bygroups(Text, Name.Tag, Text, Name.Attribute, Text)),
+ # Transclusion
+ (r'(\{{2})([^}]+)(\}{2})', bygroups(Text, Name.Tag, Text)),
+ # URLs
+ (r'(\b.?.?tps?://[^\s"]+)', bygroups(Name.Attribute)),
+
+ # general text, must come last!
+ (r'[\w]+', Text),
+ (r'.', Text)
+ ],
+ }
+
+ def __init__(self, **options):
+ self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
+ RegexLexer.__init__(self, **options)
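
To round off the markup.py hunks, a sketch (not part of the diff) of the handlecodeblocks option that both MarkdownLexer and TiddlyWiki5Lexer read via get_bool_opt above; the Markdown sample and the chosen option values are illustrative only:

    from pygments.lexers import get_lexer_by_name

    md = '# Title\n\n```python\nprint("hi")\n```\n'

    with_delegation = get_lexer_by_name('md')                      # default True
    without_delegation = get_lexer_by_name('md', handlecodeblocks=False)

    # With delegation the fenced body is tokenized by the looked-up sub-lexer
    # (PythonLexer here); without it, _handle_codeblock falls back to plain
    # String / String.Backtick tokens for the whole fenced region.
    print(list(with_delegation.get_tokens(md))[:10])
    print(list(without_delegation.get_tokens(md))[:10])
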
diff --git a/contrib/python/Pygments/py3/pygments/lexers/math.py b/contrib/python/Pygments/py3/pygments/lexers/math.py
index 88f810e70f..82f0a91c8e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/math.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/math.py
@@ -4,7 +4,7 @@
Just export lexers that were contained in this module.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/matlab.py b/contrib/python/Pygments/py3/pygments/lexers/matlab.py
index 445063935b..82ec9b22c8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/matlab.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/matlab.py
@@ -4,14 +4,14 @@
Lexers for Matlab and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
- do_insertions, include
+from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
+ do_insertions, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
@@ -31,2644 +31,2644 @@ class MatlabLexer(RegexLexer):
filenames = ['*.m']
mimetypes = ['text/matlab']
- _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
-
+ _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
+
tokens = {
- 'expressions': [
- # operators:
- (_operators, Operator),
-
- # numbers (must come before punctuation to handle `.5`; cannot use
- # `\b` due to e.g. `5. + .5`). The negative lookahead on operators
- # avoids including the dot in `1./x` (the dot is part of `./`).
- (r'(?<!\w)((\d+\.\d+)|(\d*\.\d+)|(\d+\.(?!%s)))'
- r'([eEf][+-]?\d+)?(?!\w)' % _operators, Number.Float),
- (r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
- (r'\b\d+\b', Number.Integer),
-
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
-
- (r'"(""|[^"])*"', String),
-
- (r'(?<![\w)\].])\'', String, 'string'),
- (r'[a-zA-Z_]\w*', Name),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
+ 'expressions': [
+ # operators:
+ (_operators, Operator),
+
+ # numbers (must come before punctuation to handle `.5`; cannot use
+ # `\b` due to e.g. `5. + .5`). The negative lookahead on operators
+ # avoids including the dot in `1./x` (the dot is part of `./`).
+ (r'(?<!\w)((\d+\.\d+)|(\d*\.\d+)|(\d+\.(?!%s)))'
+ r'([eEf][+-]?\d+)?(?!\w)' % _operators, Number.Float),
+ (r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
+ (r'\b\d+\b', Number.Integer),
+
+ # punctuation:
+ (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+
+ (r'"(""|[^"])*"', String),
+
+ (r'(?<![\w)\].])\'', String, 'string'),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'\s+', Whitespace),
+ (r'.', Text),
+ ],
'root': [
# line starting with '!' is sent as a system command. not sure what
# label to use...
(r'^!.*', String.Other),
(r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
(r'%.*$', Comment),
- (r'(\s*^\s*)(function)\b', bygroups(Whitespace, Keyword), 'deffunc'),
- (r'(\s*^\s*)(properties)(\s+)(\()',
- bygroups(Whitespace, Keyword, Whitespace, Punctuation),
- ('defprops', 'propattrs')),
- (r'(\s*^\s*)(properties)\b',
- bygroups(Whitespace, Keyword), 'defprops'),
-
- # from 'iskeyword' on version 9.4 (R2018a):
- # Check that there is no preceding dot, as keywords are valid field
- # names.
- (words(('break', 'case', 'catch', 'classdef', 'continue',
- 'dynamicprops', 'else', 'elseif', 'end', 'for', 'function',
- 'global', 'if', 'methods', 'otherwise', 'parfor',
- 'persistent', 'return', 'spmd', 'switch',
- 'try', 'while'),
- prefix=r'(?<!\.)(\s*)(', suffix=r')\b'),
- bygroups(Whitespace, Keyword)),
-
- (
- words(
- [
- # See https://mathworks.com/help/matlab/referencelist.html
- # Below data from 2021-02-10T18:24:08Z
- # for Matlab release R2020b
- "BeginInvoke",
- "COM",
- "Combine",
- "CombinedDatastore",
- "EndInvoke",
- "Execute",
- "FactoryGroup",
- "FactorySetting",
- "Feval",
- "FunctionTestCase",
- "GetCharArray",
- "GetFullMatrix",
- "GetVariable",
- "GetWorkspaceData",
- "GraphPlot",
- "H5.close",
- "H5.garbage_collect",
- "H5.get_libversion",
- "H5.open",
- "H5.set_free_list_limits",
- "H5A.close",
- "H5A.create",
- "H5A.delete",
- "H5A.get_info",
- "H5A.get_name",
- "H5A.get_space",
- "H5A.get_type",
- "H5A.iterate",
- "H5A.open",
- "H5A.open_by_idx",
- "H5A.open_by_name",
- "H5A.read",
- "H5A.write",
- "H5D.close",
- "H5D.create",
- "H5D.get_access_plist",
- "H5D.get_create_plist",
- "H5D.get_offset",
- "H5D.get_space",
- "H5D.get_space_status",
- "H5D.get_storage_size",
- "H5D.get_type",
- "H5D.open",
- "H5D.read",
- "H5D.set_extent",
- "H5D.vlen_get_buf_size",
- "H5D.write",
- "H5DS.attach_scale",
- "H5DS.detach_scale",
- "H5DS.get_label",
- "H5DS.get_num_scales",
- "H5DS.get_scale_name",
- "H5DS.is_scale",
- "H5DS.iterate_scales",
- "H5DS.set_label",
- "H5DS.set_scale",
- "H5E.clear",
- "H5E.get_major",
- "H5E.get_minor",
- "H5E.walk",
- "H5F.close",
- "H5F.create",
- "H5F.flush",
- "H5F.get_access_plist",
- "H5F.get_create_plist",
- "H5F.get_filesize",
- "H5F.get_freespace",
- "H5F.get_info",
- "H5F.get_mdc_config",
- "H5F.get_mdc_hit_rate",
- "H5F.get_mdc_size",
- "H5F.get_name",
- "H5F.get_obj_count",
- "H5F.get_obj_ids",
- "H5F.is_hdf5",
- "H5F.mount",
- "H5F.open",
- "H5F.reopen",
- "H5F.set_mdc_config",
- "H5F.unmount",
- "H5G.close",
- "H5G.create",
- "H5G.get_info",
- "H5G.open",
- "H5I.dec_ref",
- "H5I.get_file_id",
- "H5I.get_name",
- "H5I.get_ref",
- "H5I.get_type",
- "H5I.inc_ref",
- "H5I.is_valid",
- "H5L.copy",
- "H5L.create_external",
- "H5L.create_hard",
- "H5L.create_soft",
- "H5L.delete",
- "H5L.exists",
- "H5L.get_info",
- "H5L.get_name_by_idx",
- "H5L.get_val",
- "H5L.iterate",
- "H5L.iterate_by_name",
- "H5L.move",
- "H5L.visit",
- "H5L.visit_by_name",
- "H5ML.compare_values",
- "H5ML.get_constant_names",
- "H5ML.get_constant_value",
- "H5ML.get_function_names",
- "H5ML.get_mem_datatype",
- "H5O.close",
- "H5O.copy",
- "H5O.get_comment",
- "H5O.get_comment_by_name",
- "H5O.get_info",
- "H5O.link",
- "H5O.open",
- "H5O.open_by_idx",
- "H5O.set_comment",
- "H5O.set_comment_by_name",
- "H5O.visit",
- "H5O.visit_by_name",
- "H5P.all_filters_avail",
- "H5P.close",
- "H5P.close_class",
- "H5P.copy",
- "H5P.create",
- "H5P.equal",
- "H5P.exist",
- "H5P.fill_value_defined",
- "H5P.get",
- "H5P.get_alignment",
- "H5P.get_alloc_time",
- "H5P.get_attr_creation_order",
- "H5P.get_attr_phase_change",
- "H5P.get_btree_ratios",
- "H5P.get_char_encoding",
- "H5P.get_chunk",
- "H5P.get_chunk_cache",
- "H5P.get_class",
- "H5P.get_class_name",
- "H5P.get_class_parent",
- "H5P.get_copy_object",
- "H5P.get_create_intermediate_group",
- "H5P.get_driver",
- "H5P.get_edc_check",
- "H5P.get_external",
- "H5P.get_external_count",
- "H5P.get_family_offset",
- "H5P.get_fapl_core",
- "H5P.get_fapl_family",
- "H5P.get_fapl_multi",
- "H5P.get_fclose_degree",
- "H5P.get_fill_time",
- "H5P.get_fill_value",
- "H5P.get_filter",
- "H5P.get_filter_by_id",
- "H5P.get_gc_references",
- "H5P.get_hyper_vector_size",
- "H5P.get_istore_k",
- "H5P.get_layout",
- "H5P.get_libver_bounds",
- "H5P.get_link_creation_order",
- "H5P.get_link_phase_change",
- "H5P.get_mdc_config",
- "H5P.get_meta_block_size",
- "H5P.get_multi_type",
- "H5P.get_nfilters",
- "H5P.get_nprops",
- "H5P.get_sieve_buf_size",
- "H5P.get_size",
- "H5P.get_sizes",
- "H5P.get_small_data_block_size",
- "H5P.get_sym_k",
- "H5P.get_userblock",
- "H5P.get_version",
- "H5P.isa_class",
- "H5P.iterate",
- "H5P.modify_filter",
- "H5P.remove_filter",
- "H5P.set",
- "H5P.set_alignment",
- "H5P.set_alloc_time",
- "H5P.set_attr_creation_order",
- "H5P.set_attr_phase_change",
- "H5P.set_btree_ratios",
- "H5P.set_char_encoding",
- "H5P.set_chunk",
- "H5P.set_chunk_cache",
- "H5P.set_copy_object",
- "H5P.set_create_intermediate_group",
- "H5P.set_deflate",
- "H5P.set_edc_check",
- "H5P.set_external",
- "H5P.set_family_offset",
- "H5P.set_fapl_core",
- "H5P.set_fapl_family",
- "H5P.set_fapl_log",
- "H5P.set_fapl_multi",
- "H5P.set_fapl_sec2",
- "H5P.set_fapl_split",
- "H5P.set_fapl_stdio",
- "H5P.set_fclose_degree",
- "H5P.set_fill_time",
- "H5P.set_fill_value",
- "H5P.set_filter",
- "H5P.set_fletcher32",
- "H5P.set_gc_references",
- "H5P.set_hyper_vector_size",
- "H5P.set_istore_k",
- "H5P.set_layout",
- "H5P.set_libver_bounds",
- "H5P.set_link_creation_order",
- "H5P.set_link_phase_change",
- "H5P.set_mdc_config",
- "H5P.set_meta_block_size",
- "H5P.set_multi_type",
- "H5P.set_nbit",
- "H5P.set_scaleoffset",
- "H5P.set_shuffle",
- "H5P.set_sieve_buf_size",
- "H5P.set_sizes",
- "H5P.set_small_data_block_size",
- "H5P.set_sym_k",
- "H5P.set_userblock",
- "H5R.create",
- "H5R.dereference",
- "H5R.get_name",
- "H5R.get_obj_type",
- "H5R.get_region",
- "H5S.close",
- "H5S.copy",
- "H5S.create",
- "H5S.create_simple",
- "H5S.extent_copy",
- "H5S.get_select_bounds",
- "H5S.get_select_elem_npoints",
- "H5S.get_select_elem_pointlist",
- "H5S.get_select_hyper_blocklist",
- "H5S.get_select_hyper_nblocks",
- "H5S.get_select_npoints",
- "H5S.get_select_type",
- "H5S.get_simple_extent_dims",
- "H5S.get_simple_extent_ndims",
- "H5S.get_simple_extent_npoints",
- "H5S.get_simple_extent_type",
- "H5S.is_simple",
- "H5S.offset_simple",
- "H5S.select_all",
- "H5S.select_elements",
- "H5S.select_hyperslab",
- "H5S.select_none",
- "H5S.select_valid",
- "H5S.set_extent_none",
- "H5S.set_extent_simple",
- "H5T.array_create",
- "H5T.close",
- "H5T.commit",
- "H5T.committed",
- "H5T.copy",
- "H5T.create",
- "H5T.detect_class",
- "H5T.enum_create",
- "H5T.enum_insert",
- "H5T.enum_nameof",
- "H5T.enum_valueof",
- "H5T.equal",
- "H5T.get_array_dims",
- "H5T.get_array_ndims",
- "H5T.get_class",
- "H5T.get_create_plist",
- "H5T.get_cset",
- "H5T.get_ebias",
- "H5T.get_fields",
- "H5T.get_inpad",
- "H5T.get_member_class",
- "H5T.get_member_index",
- "H5T.get_member_name",
- "H5T.get_member_offset",
- "H5T.get_member_type",
- "H5T.get_member_value",
- "H5T.get_native_type",
- "H5T.get_nmembers",
- "H5T.get_norm",
- "H5T.get_offset",
- "H5T.get_order",
- "H5T.get_pad",
- "H5T.get_precision",
- "H5T.get_sign",
- "H5T.get_size",
- "H5T.get_strpad",
- "H5T.get_super",
- "H5T.get_tag",
- "H5T.insert",
- "H5T.is_variable_str",
- "H5T.lock",
- "H5T.open",
- "H5T.pack",
- "H5T.set_cset",
- "H5T.set_ebias",
- "H5T.set_fields",
- "H5T.set_inpad",
- "H5T.set_norm",
- "H5T.set_offset",
- "H5T.set_order",
- "H5T.set_pad",
- "H5T.set_precision",
- "H5T.set_sign",
- "H5T.set_size",
- "H5T.set_strpad",
- "H5T.set_tag",
- "H5T.vlen_create",
- "H5Z.filter_avail",
- "H5Z.get_filter_info",
- "Inf",
- "KeyValueDatastore",
- "KeyValueStore",
- "MException",
- "MException.last",
- "MaximizeCommandWindow",
- "MemoizedFunction",
- "MinimizeCommandWindow",
- "NET",
- "NET.Assembly",
- "NET.GenericClass",
- "NET.NetException",
- "NET.addAssembly",
- "NET.convertArray",
- "NET.createArray",
- "NET.createGeneric",
- "NET.disableAutoRelease",
- "NET.enableAutoRelease",
- "NET.invokeGenericMethod",
- "NET.isNETSupported",
- "NET.setStaticProperty",
- "NaN",
- "NaT",
- "OperationResult",
- "PutCharArray",
- "PutFullMatrix",
- "PutWorkspaceData",
- "PythonEnvironment",
- "Quit",
- "RandStream",
- "ReleaseCompatibilityException",
- "ReleaseCompatibilityResults",
- "Remove",
- "RemoveAll",
- "Setting",
- "SettingsGroup",
- "TallDatastore",
- "Test",
- "TestResult",
- "Tiff",
- "TransformedDatastore",
- "ValueIterator",
- "VersionResults",
- "VideoReader",
- "VideoWriter",
- "abs",
- "accumarray",
- "acos",
- "acosd",
- "acosh",
- "acot",
- "acotd",
- "acoth",
- "acsc",
- "acscd",
- "acsch",
- "actxGetRunningServer",
- "actxserver",
- "add",
- "addCause",
- "addCorrection",
- "addFile",
- "addFolderIncludingChildFiles",
- "addGroup",
- "addLabel",
- "addPath",
- "addReference",
- "addSetting",
- "addShortcut",
- "addShutdownFile",
- "addStartupFile",
- "addStyle",
- "addToolbarExplorationButtons",
- "addboundary",
- "addcats",
- "addedge",
- "addevent",
- "addlistener",
- "addmulti",
- "addnode",
- "addpath",
- "addpoints",
- "addpref",
- "addprop",
- "addsample",
- "addsampletocollection",
- "addtodate",
- "addts",
- "addvars",
- "adjacency",
- "airy",
- "align",
- "alim",
- "all",
- "allchild",
- "alpha",
- "alphaShape",
- "alphaSpectrum",
- "alphaTriangulation",
- "alphamap",
- "alphanumericBoundary",
- "alphanumericsPattern",
- "amd",
- "analyzeCodeCompatibility",
- "ancestor",
- "angle",
- "animatedline",
- "annotation",
- "ans",
- "any",
- "appdesigner",
- "append",
- "area",
- "arguments",
- "array2table",
- "array2timetable",
- "arrayDatastore",
- "arrayfun",
- "asFewOfPattern",
- "asManyOfPattern",
- "ascii",
- "asec",
- "asecd",
- "asech",
- "asin",
- "asind",
- "asinh",
- "assert",
- "assignin",
- "atan",
- "atan2",
- "atan2d",
- "atand",
- "atanh",
- "audiodevinfo",
- "audiodevreset",
- "audioinfo",
- "audioplayer",
- "audioread",
- "audiorecorder",
- "audiowrite",
- "autumn",
- "axes",
- "axis",
- "axtoolbar",
- "axtoolbarbtn",
- "balance",
- "bandwidth",
- "bar",
- "bar3",
- "bar3h",
- "barh",
- "barycentricToCartesian",
- "base2dec",
- "batchStartupOptionUsed",
- "bctree",
- "beep",
- "bench",
- "besselh",
- "besseli",
- "besselj",
- "besselk",
- "bessely",
- "beta",
- "betainc",
- "betaincinv",
- "betaln",
- "between",
- "bfsearch",
- "bicg",
- "bicgstab",
- "bicgstabl",
- "biconncomp",
- "bin2dec",
- "binary",
- "binscatter",
- "bitand",
- "bitcmp",
- "bitget",
- "bitnot",
- "bitor",
- "bitset",
- "bitshift",
- "bitxor",
- "blanks",
- "ble",
- "blelist",
- "blkdiag",
- "bluetooth",
- "bluetoothlist",
- "bone",
- "boundary",
- "boundaryFacets",
- "boundaryshape",
- "boundingbox",
- "bounds",
- "box",
- "boxchart",
- "brighten",
- "brush",
- "bsxfun",
- "bubblechart",
- "bubblechart3",
- "bubblelegend",
- "bubblelim",
- "bubblesize",
- "builddocsearchdb",
- "builtin",
- "bvp4c",
- "bvp5c",
- "bvpget",
- "bvpinit",
- "bvpset",
- "bvpxtend",
- "caldays",
- "caldiff",
- "calendar",
- "calendarDuration",
- "calllib",
- "calmonths",
- "calquarters",
- "calweeks",
- "calyears",
- "camdolly",
- "cameratoolbar",
- "camlight",
- "camlookat",
- "camorbit",
- "campan",
- "campos",
- "camproj",
- "camroll",
- "camtarget",
- "camup",
- "camva",
- "camzoom",
- "canUseGPU",
- "canUseParallelPool",
- "cart2pol",
- "cart2sph",
- "cartesianToBarycentric",
- "caseInsensitivePattern",
- "caseSensitivePattern",
- "cast",
- "cat",
- "categorical",
- "categories",
- "caxis",
- "cd",
- "cdf2rdf",
- "cdfepoch",
- "cdfinfo",
- "cdflib",
- "cdfread",
- "ceil",
- "cell",
- "cell2mat",
- "cell2struct",
- "cell2table",
- "celldisp",
- "cellfun",
- "cellplot",
- "cellstr",
- "centrality",
- "centroid",
- "cgs",
- "char",
- "characterListPattern",
- "characteristic",
- "checkcode",
- "chol",
- "cholupdate",
- "choose",
- "chooseContextMenu",
- "circshift",
- "circumcenter",
- "cla",
- "clabel",
- "class",
- "classUnderlying",
- "clc",
- "clear",
- "clearAllMemoizedCaches",
- "clearPersonalValue",
- "clearTemporaryValue",
- "clearpoints",
- "clearvars",
- "clf",
- "clibArray",
- "clibConvertArray",
- "clibIsNull",
- "clibIsReadOnly",
- "clibRelease",
- "clibgen.buildInterface",
- "clibgen.generateLibraryDefinition",
- "clipboard",
- "clock",
- "clone",
- "close",
- "closeFile",
- "closereq",
- "cmap2gray",
- "cmpermute",
- "cmunique",
- "codeCompatibilityReport",
- "colamd",
- "collapse",
- "colon",
- "colorbar",
- "colorcube",
- "colormap",
- "colororder",
- "colperm",
- "com.mathworks.engine.MatlabEngine",
- "com.mathworks.matlab.types.CellStr",
- "com.mathworks.matlab.types.Complex",
- "com.mathworks.matlab.types.HandleObject",
- "com.mathworks.matlab.types.Struct",
- "combine",
- "comet",
- "comet3",
- "compan",
- "compass",
- "complex",
- "compose",
- "computer",
- "comserver",
- "cond",
- "condeig",
- "condensation",
- "condest",
- "coneplot",
- "configureCallback",
- "configureTerminator",
- "conj",
- "conncomp",
- "containers.Map",
- "contains",
- "containsrange",
- "contour",
- "contour3",
- "contourc",
- "contourf",
- "contourslice",
- "contrast",
- "conv",
- "conv2",
- "convertCharsToStrings",
- "convertContainedStringsToChars",
- "convertStringsToChars",
- "convertTo",
- "convertvars",
- "convexHull",
- "convhull",
- "convhulln",
- "convn",
- "cool",
- "copper",
- "copyHDU",
- "copyfile",
- "copygraphics",
- "copyobj",
- "corrcoef",
- "cos",
- "cosd",
- "cosh",
- "cospi",
- "cot",
- "cotd",
- "coth",
- "count",
- "countcats",
- "cov",
- "cplxpair",
- "cputime",
- "createCategory",
- "createFile",
- "createImg",
- "createLabel",
- "createTbl",
- "criticalAlpha",
- "cross",
- "csc",
- "cscd",
- "csch",
- "ctranspose",
- "cummax",
- "cummin",
- "cumprod",
- "cumsum",
- "cumtrapz",
- "curl",
- "currentProject",
- "cylinder",
- "daspect",
- "dataTipInteraction",
- "dataTipTextRow",
- "datacursormode",
- "datastore",
- "datatip",
- "date",
- "datenum",
- "dateshift",
- "datestr",
- "datetick",
- "datetime",
- "datevec",
- "day",
- "days",
- "dbclear",
- "dbcont",
- "dbdown",
- "dbmex",
- "dbquit",
- "dbstack",
- "dbstatus",
- "dbstep",
- "dbstop",
- "dbtype",
- "dbup",
- "dde23",
- "ddeget",
- "ddensd",
- "ddesd",
- "ddeset",
- "deblank",
- "dec2base",
- "dec2bin",
- "dec2hex",
- "decic",
- "decomposition",
- "deconv",
- "deg2rad",
- "degree",
- "del2",
- "delaunay",
- "delaunayTriangulation",
- "delaunayn",
- "delete",
- "deleteCol",
- "deleteFile",
- "deleteHDU",
- "deleteKey",
- "deleteRecord",
- "deleteRows",
- "delevent",
- "delimitedTextImportOptions",
- "delsample",
- "delsamplefromcollection",
- "demo",
- "descriptor",
- "det",
- "details",
- "detectImportOptions",
- "detrend",
- "deval",
- "dfsearch",
- "diag",
- "dialog",
- "diary",
- "diff",
- "diffuse",
- "digitBoundary",
- "digitsPattern",
- "digraph",
- "dir",
- "disableDefaultInteractivity",
- "discretize",
- "disp",
- "display",
- "dissect",
- "distances",
- "dither",
- "divergence",
- "dmperm",
- "doc",
- "docsearch",
- "dos",
- "dot",
- "double",
- "drag",
- "dragrect",
- "drawnow",
- "dsearchn",
- "duration",
- "dynamicprops",
- "echo",
- "echodemo",
- "echotcpip",
- "edgeAttachments",
- "edgecount",
- "edges",
- "edit",
- "eig",
- "eigs",
- "ellipj",
- "ellipke",
- "ellipsoid",
- "empty",
- "enableDefaultInteractivity",
- "enableLegacyExplorationModes",
- "enableNETfromNetworkDrive",
- "enableservice",
- "endsWith",
- "enumeration",
- "eomday",
- "eps",
- "eq",
- "equilibrate",
- "erase",
- "eraseBetween",
- "erf",
- "erfc",
- "erfcinv",
- "erfcx",
- "erfinv",
- "error",
- "errorbar",
- "errordlg",
- "etime",
- "etree",
- "etreeplot",
- "eval",
- "evalc",
- "evalin",
- "event.ClassInstanceEvent",
- "event.DynamicPropertyEvent",
- "event.EventData",
- "event.PropertyEvent",
- "event.hasListener",
- "event.listener",
- "event.proplistener",
- "eventlisteners",
- "events",
- "exceltime",
- "exist",
- "exit",
- "exp",
- "expand",
- "expint",
- "expm",
- "expm1",
- "export",
- "export2wsdlg",
- "exportapp",
- "exportgraphics",
- "exportsetupdlg",
- "extract",
- "extractAfter",
- "extractBefore",
- "extractBetween",
- "eye",
- "ezpolar",
- "faceNormal",
- "factor",
- "factorial",
- "false",
- "fclose",
- "fcontour",
- "feather",
- "featureEdges",
- "feof",
- "ferror",
- "feval",
- "fewerbins",
- "fft",
- "fft2",
- "fftn",
- "fftshift",
- "fftw",
- "fgetl",
- "fgets",
- "fieldnames",
- "figure",
- "figurepalette",
- "fileDatastore",
- "fileMode",
- "fileName",
- "fileattrib",
- "filemarker",
- "fileparts",
- "fileread",
- "filesep",
- "fill",
- "fill3",
- "fillmissing",
- "filloutliers",
- "filter",
- "filter2",
- "fimplicit",
- "fimplicit3",
- "find",
- "findCategory",
- "findEvent",
- "findFile",
- "findLabel",
- "findall",
- "findedge",
- "findfigs",
- "findgroups",
- "findnode",
- "findobj",
- "findprop",
- "finish",
- "fitsdisp",
- "fitsinfo",
- "fitsread",
- "fitswrite",
- "fix",
- "fixedWidthImportOptions",
- "flag",
- "flintmax",
- "flip",
- "flipedge",
- "fliplr",
- "flipud",
- "floor",
- "flow",
- "flush",
- "fmesh",
- "fminbnd",
- "fminsearch",
- "fopen",
- "format",
- "fplot",
- "fplot3",
- "fprintf",
- "frame2im",
- "fread",
- "freeBoundary",
- "freqspace",
- "frewind",
- "fscanf",
- "fseek",
- "fsurf",
- "ftell",
- "ftp",
- "full",
- "fullfile",
- "func2str",
- "function_handle",
- "functions",
- "functiontests",
- "funm",
- "fwrite",
- "fzero",
- "gallery",
- "gamma",
- "gammainc",
- "gammaincinv",
- "gammaln",
- "gather",
- "gca",
- "gcbf",
- "gcbo",
- "gcd",
- "gcf",
- "gcmr",
- "gco",
- "genpath",
- "geoaxes",
- "geobasemap",
- "geobubble",
- "geodensityplot",
- "geolimits",
- "geoplot",
- "geoscatter",
- "geotickformat",
- "get",
- "getAColParms",
- "getAxes",
- "getBColParms",
- "getColName",
- "getColType",
- "getColorbar",
- "getConstantValue",
- "getEqColType",
- "getFileFormats",
- "getHDUnum",
- "getHDUtype",
- "getHdrSpace",
- "getImgSize",
- "getImgType",
- "getLayout",
- "getLegend",
- "getMockHistory",
- "getNumCols",
- "getNumHDUs",
- "getNumInputs",
- "getNumInputsImpl",
- "getNumOutputs",
- "getNumOutputsImpl",
- "getNumRows",
- "getOpenFiles",
- "getProfiles",
- "getPropertyGroupsImpl",
- "getReport",
- "getTimeStr",
- "getVersion",
- "getabstime",
- "getappdata",
- "getaudiodata",
- "getdatasamples",
- "getdatasamplesize",
- "getenv",
- "getfield",
- "getframe",
- "getinterpmethod",
- "getnext",
- "getpinstatus",
- "getpixelposition",
- "getplayer",
- "getpoints",
- "getpref",
- "getqualitydesc",
- "getrangefromclass",
- "getsamples",
- "getsampleusingtime",
- "gettimeseriesnames",
- "gettsafteratevent",
- "gettsafterevent",
- "gettsatevent",
- "gettsbeforeatevent",
- "gettsbeforeevent",
- "gettsbetweenevents",
- "getvaropts",
- "ginput",
- "gmres",
- "gobjects",
- "gplot",
- "grabcode",
- "gradient",
- "graph",
- "gray",
- "grid",
- "griddata",
- "griddatan",
- "griddedInterpolant",
- "groot",
- "groupcounts",
- "groupfilter",
- "groupsummary",
- "grouptransform",
- "gsvd",
- "gtext",
- "guidata",
- "guide",
- "guihandles",
- "gunzip",
- "gzip",
- "h5create",
- "h5disp",
- "h5info",
- "h5read",
- "h5readatt",
- "h5write",
- "h5writeatt",
- "hadamard",
- "handle",
- "hankel",
- "hasFactoryValue",
- "hasFrame",
- "hasGroup",
- "hasPersonalValue",
- "hasSetting",
- "hasTemporaryValue",
- "hasdata",
- "hasnext",
- "hdfan",
- "hdfdf24",
- "hdfdfr8",
- "hdfh",
- "hdfhd",
- "hdfhe",
- "hdfhx",
- "hdfinfo",
- "hdfml",
- "hdfpt",
- "hdfread",
- "hdfv",
- "hdfvf",
- "hdfvh",
- "hdfvs",
- "head",
- "heatmap",
- "height",
- "help",
- "helpdlg",
- "hess",
- "hex2dec",
- "hex2num",
- "hgexport",
- "hggroup",
- "hgtransform",
- "hidden",
- "highlight",
- "hilb",
- "histcounts",
- "histcounts2",
- "histogram",
- "histogram2",
- "hms",
- "hold",
- "holes",
- "home",
- "horzcat",
- "hot",
- "hour",
- "hours",
- "hover",
- "hsv",
- "hsv2rgb",
- "hypot",
- "i",
- "ichol",
- "idealfilter",
- "idivide",
- "ifft",
- "ifft2",
- "ifftn",
- "ifftshift",
- "ilu",
- "im2double",
- "im2frame",
- "im2gray",
- "im2java",
- "imag",
- "image",
- "imageDatastore",
- "imagesc",
- "imapprox",
- "imfinfo",
- "imformats",
- "imgCompress",
- "import",
- "importdata",
- "imread",
- "imresize",
- "imshow",
- "imtile",
- "imwrite",
- "inShape",
- "incenter",
- "incidence",
- "ind2rgb",
- "ind2sub",
- "indegree",
- "inedges",
- "infoImpl",
- "inmem",
- "inner2outer",
- "innerjoin",
- "inpolygon",
- "input",
- "inputParser",
- "inputdlg",
- "inputname",
- "insertATbl",
- "insertAfter",
- "insertBTbl",
- "insertBefore",
- "insertCol",
- "insertImg",
- "insertRows",
- "int16",
- "int2str",
- "int32",
- "int64",
- "int8",
- "integral",
- "integral2",
- "integral3",
- "interp1",
- "interp2",
- "interp3",
- "interpft",
- "interpn",
- "interpstreamspeed",
- "intersect",
- "intmax",
- "intmin",
- "inv",
- "invhilb",
- "ipermute",
- "iqr",
- "isCompressedImg",
- "isConnected",
- "isDiscreteStateSpecificationMutableImpl",
- "isDone",
- "isDoneImpl",
- "isInactivePropertyImpl",
- "isInputComplexityMutableImpl",
- "isInputDataTypeMutableImpl",
- "isInputSizeMutableImpl",
- "isInterior",
- "isKey",
- "isLoaded",
- "isLocked",
- "isMATLABReleaseOlderThan",
- "isPartitionable",
- "isShuffleable",
- "isStringScalar",
- "isTunablePropertyDataTypeMutableImpl",
- "isUnderlyingType",
- "isa",
- "isaUnderlying",
- "isappdata",
- "isbanded",
- "isbetween",
- "iscalendarduration",
- "iscategorical",
- "iscategory",
- "iscell",
- "iscellstr",
- "ischange",
- "ischar",
- "iscolumn",
- "iscom",
- "isdag",
- "isdatetime",
- "isdiag",
- "isdst",
- "isduration",
- "isempty",
- "isenum",
- "isequal",
- "isequaln",
- "isevent",
- "isfield",
- "isfile",
- "isfinite",
- "isfloat",
- "isfolder",
- "isgraphics",
- "ishandle",
- "ishermitian",
- "ishold",
- "ishole",
- "isinf",
- "isinteger",
- "isinterface",
- "isinterior",
- "isisomorphic",
- "isjava",
- "iskeyword",
- "isletter",
- "islocalmax",
- "islocalmin",
- "islogical",
- "ismac",
- "ismatrix",
- "ismember",
- "ismembertol",
- "ismethod",
- "ismissing",
- "ismultigraph",
- "isnan",
- "isnat",
- "isnumeric",
- "isobject",
- "isocaps",
- "isocolors",
- "isomorphism",
- "isonormals",
- "isordinal",
- "isosurface",
- "isoutlier",
- "ispc",
- "isplaying",
- "ispref",
- "isprime",
- "isprop",
- "isprotected",
- "isreal",
- "isrecording",
- "isregular",
- "isrow",
- "isscalar",
- "issimplified",
- "issorted",
- "issortedrows",
- "isspace",
- "issparse",
- "isstring",
- "isstrprop",
- "isstruct",
- "isstudent",
- "issymmetric",
- "istable",
- "istall",
- "istimetable",
- "istril",
- "istriu",
- "isundefined",
- "isunix",
- "isvalid",
- "isvarname",
- "isvector",
- "isweekend",
- "j",
- "javaArray",
- "javaMethod",
- "javaMethodEDT",
- "javaObject",
- "javaObjectEDT",
- "javaaddpath",
- "javachk",
- "javaclasspath",
- "javarmpath",
- "jet",
- "join",
- "jsondecode",
- "jsonencode",
- "juliandate",
- "keyboard",
- "keys",
- "kron",
- "labeledge",
- "labelnode",
- "lag",
- "laplacian",
- "lastwarn",
- "layout",
- "lcm",
- "ldl",
- "leapseconds",
- "legend",
- "legendre",
- "length",
- "letterBoundary",
- "lettersPattern",
- "lib.pointer",
- "libfunctions",
- "libfunctionsview",
- "libisloaded",
- "libpointer",
- "libstruct",
- "license",
- "light",
- "lightangle",
- "lighting",
- "lin2mu",
- "line",
- "lineBoundary",
- "lines",
- "linkaxes",
- "linkdata",
- "linkprop",
- "linsolve",
- "linspace",
- "listModifiedFiles",
- "listRequiredFiles",
- "listdlg",
- "listener",
- "listfonts",
- "load",
- "loadObjectImpl",
- "loadlibrary",
- "loadobj",
- "localfunctions",
- "log",
- "log10",
- "log1p",
- "log2",
- "logical",
- "loglog",
- "logm",
- "logspace",
- "lookAheadBoundary",
- "lookBehindBoundary",
- "lookfor",
- "lower",
- "ls",
- "lscov",
- "lsqminnorm",
- "lsqnonneg",
- "lsqr",
- "lu",
- "magic",
- "makehgtform",
- "makima",
- "mapreduce",
- "mapreducer",
- "maskedPattern",
- "mat2cell",
- "mat2str",
- "matches",
- "matchpairs",
- "material",
- "matfile",
- "matlab.System",
- "matlab.addons.disableAddon",
- "matlab.addons.enableAddon",
- "matlab.addons.install",
- "matlab.addons.installedAddons",
- "matlab.addons.isAddonEnabled",
- "matlab.addons.toolbox.installToolbox",
- "matlab.addons.toolbox.installedToolboxes",
- "matlab.addons.toolbox.packageToolbox",
- "matlab.addons.toolbox.toolboxVersion",
- "matlab.addons.toolbox.uninstallToolbox",
- "matlab.addons.uninstall",
- "matlab.apputil.create",
- "matlab.apputil.getInstalledAppInfo",
- "matlab.apputil.install",
- "matlab.apputil.package",
- "matlab.apputil.run",
- "matlab.apputil.uninstall",
- "matlab.codetools.requiredFilesAndProducts",
- "matlab.engine.FutureResult",
- "matlab.engine.MatlabEngine",
- "matlab.engine.connect_matlab",
- "matlab.engine.engineName",
- "matlab.engine.find_matlab",
- "matlab.engine.isEngineShared",
- "matlab.engine.shareEngine",
- "matlab.engine.start_matlab",
- "matlab.exception.JavaException",
- "matlab.exception.PyException",
- "matlab.graphics.chartcontainer.ChartContainer",
- "matlab.graphics.chartcontainer.mixin.Colorbar",
- "matlab.graphics.chartcontainer.mixin.Legend",
- "matlab.io.Datastore",
- "matlab.io.datastore.BlockedFileSet",
- "matlab.io.datastore.DsFileReader",
- "matlab.io.datastore.DsFileSet",
- "matlab.io.datastore.FileSet",
- "matlab.io.datastore.FileWritable",
- "matlab.io.datastore.FoldersPropertyProvider",
- "matlab.io.datastore.HadoopLocationBased",
- "matlab.io.datastore.Partitionable",
- "matlab.io.datastore.Shuffleable",
- "matlab.io.hdf4.sd",
- "matlab.io.hdfeos.gd",
- "matlab.io.hdfeos.sw",
- "matlab.io.saveVariablesToScript",
- "matlab.lang.OnOffSwitchState",
- "matlab.lang.correction.AppendArgumentsCorrection",
- "matlab.lang.correction.ConvertToFunctionNotationCorrection",
- "matlab.lang.correction.ReplaceIdentifierCorrection",
- "matlab.lang.makeUniqueStrings",
- "matlab.lang.makeValidName",
- "matlab.mex.MexHost",
- "matlab.mixin.Copyable",
- "matlab.mixin.CustomDisplay",
- "matlab.mixin.Heterogeneous",
- "matlab.mixin.SetGet",
- "matlab.mixin.SetGetExactNames",
- "matlab.mixin.util.PropertyGroup",
- "matlab.mock.AnyArguments",
- "matlab.mock.InteractionHistory",
- "matlab.mock.InteractionHistory.forMock",
- "matlab.mock.MethodCallBehavior",
- "matlab.mock.PropertyBehavior",
- "matlab.mock.PropertyGetBehavior",
- "matlab.mock.PropertySetBehavior",
- "matlab.mock.TestCase",
- "matlab.mock.actions.AssignOutputs",
- "matlab.mock.actions.DoNothing",
- "matlab.mock.actions.Invoke",
- "matlab.mock.actions.ReturnStoredValue",
- "matlab.mock.actions.StoreValue",
- "matlab.mock.actions.ThrowException",
- "matlab.mock.constraints.Occurred",
- "matlab.mock.constraints.WasAccessed",
- "matlab.mock.constraints.WasCalled",
- "matlab.mock.constraints.WasSet",
- "matlab.net.ArrayFormat",
- "matlab.net.QueryParameter",
- "matlab.net.URI",
- "matlab.net.base64decode",
- "matlab.net.base64encode",
- "matlab.net.http.AuthInfo",
- "matlab.net.http.AuthenticationScheme",
- "matlab.net.http.Cookie",
- "matlab.net.http.CookieInfo",
- "matlab.net.http.Credentials",
- "matlab.net.http.Disposition",
- "matlab.net.http.HTTPException",
- "matlab.net.http.HTTPOptions",
- "matlab.net.http.HeaderField",
- "matlab.net.http.LogRecord",
- "matlab.net.http.MediaType",
- "matlab.net.http.Message",
- "matlab.net.http.MessageBody",
- "matlab.net.http.MessageType",
- "matlab.net.http.ProgressMonitor",
- "matlab.net.http.ProtocolVersion",
- "matlab.net.http.RequestLine",
- "matlab.net.http.RequestMessage",
- "matlab.net.http.RequestMethod",
- "matlab.net.http.ResponseMessage",
- "matlab.net.http.StartLine",
- "matlab.net.http.StatusClass",
- "matlab.net.http.StatusCode",
- "matlab.net.http.StatusLine",
- "matlab.net.http.field.AcceptField",
- "matlab.net.http.field.AuthenticateField",
- "matlab.net.http.field.AuthenticationInfoField",
- "matlab.net.http.field.AuthorizationField",
- "matlab.net.http.field.ContentDispositionField",
- "matlab.net.http.field.ContentLengthField",
- "matlab.net.http.field.ContentLocationField",
- "matlab.net.http.field.ContentTypeField",
- "matlab.net.http.field.CookieField",
- "matlab.net.http.field.DateField",
- "matlab.net.http.field.GenericField",
- "matlab.net.http.field.GenericParameterizedField",
- "matlab.net.http.field.HTTPDateField",
- "matlab.net.http.field.IntegerField",
- "matlab.net.http.field.LocationField",
- "matlab.net.http.field.MediaRangeField",
- "matlab.net.http.field.SetCookieField",
- "matlab.net.http.field.URIReferenceField",
- "matlab.net.http.io.BinaryConsumer",
- "matlab.net.http.io.ContentConsumer",
- "matlab.net.http.io.ContentProvider",
- "matlab.net.http.io.FileConsumer",
- "matlab.net.http.io.FileProvider",
- "matlab.net.http.io.FormProvider",
- "matlab.net.http.io.GenericConsumer",
- "matlab.net.http.io.GenericProvider",
- "matlab.net.http.io.ImageConsumer",
- "matlab.net.http.io.ImageProvider",
- "matlab.net.http.io.JSONConsumer",
- "matlab.net.http.io.JSONProvider",
- "matlab.net.http.io.MultipartConsumer",
- "matlab.net.http.io.MultipartFormProvider",
- "matlab.net.http.io.MultipartProvider",
- "matlab.net.http.io.StringConsumer",
- "matlab.net.http.io.StringProvider",
- "matlab.perftest.FixedTimeExperiment",
- "matlab.perftest.FrequentistTimeExperiment",
- "matlab.perftest.TestCase",
- "matlab.perftest.TimeExperiment",
- "matlab.perftest.TimeResult",
- "matlab.project.Project",
- "matlab.project.convertDefinitionFiles",
- "matlab.project.createProject",
- "matlab.project.deleteProject",
- "matlab.project.loadProject",
- "matlab.project.rootProject",
- "matlab.settings.FactoryGroup.createToolboxGroup",
- "matlab.settings.SettingsFileUpgrader",
- "matlab.settings.loadSettingsCompatibilityResults",
- "matlab.settings.mustBeIntegerScalar",
- "matlab.settings.mustBeLogicalScalar",
- "matlab.settings.mustBeNumericScalar",
- "matlab.settings.mustBeStringScalar",
- "matlab.settings.reloadFactoryFile",
- "matlab.system.mixin.FiniteSource",
- "matlab.tall.blockMovingWindow",
- "matlab.tall.movingWindow",
- "matlab.tall.reduce",
- "matlab.tall.transform",
- "matlab.test.behavior.Missing",
- "matlab.ui.componentcontainer.ComponentContainer",
- "matlab.uitest.TestCase",
- "matlab.uitest.TestCase.forInteractiveUse",
- "matlab.uitest.unlock",
- "matlab.unittest.Test",
- "matlab.unittest.TestCase",
- "matlab.unittest.TestResult",
- "matlab.unittest.TestRunner",
- "matlab.unittest.TestSuite",
- "matlab.unittest.constraints.BooleanConstraint",
- "matlab.unittest.constraints.Constraint",
- "matlab.unittest.constraints.Tolerance",
- "matlab.unittest.diagnostics.ConstraintDiagnostic",
- "matlab.unittest.diagnostics.Diagnostic",
- "matlab.unittest.fixtures.Fixture",
- "matlab.unittest.measurement.DefaultMeasurementResult",
- "matlab.unittest.measurement.MeasurementResult",
- "matlab.unittest.measurement.chart.ComparisonPlot",
- "matlab.unittest.plugins.OutputStream",
- "matlab.unittest.plugins.Parallelizable",
- "matlab.unittest.plugins.QualifyingPlugin",
- "matlab.unittest.plugins.TestRunnerPlugin",
- "matlab.wsdl.createWSDLClient",
- "matlab.wsdl.setWSDLToolPath",
- "matlabRelease",
- "matlabrc",
- "matlabroot",
- "max",
- "maxflow",
- "maxk",
- "mean",
- "median",
- "memmapfile",
- "memoize",
- "memory",
- "mergecats",
- "mergevars",
- "mesh",
- "meshc",
- "meshgrid",
- "meshz",
- "meta.ArrayDimension",
- "meta.DynamicProperty",
- "meta.EnumeratedValue",
- "meta.FixedDimension",
- "meta.MetaData",
- "meta.UnrestrictedDimension",
- "meta.Validation",
- "meta.abstractDetails",
- "meta.class",
- "meta.class.fromName",
- "meta.event",
- "meta.method",
- "meta.package",
- "meta.package.fromName",
- "meta.package.getAllPackages",
- "meta.property",
- "metaclass",
- "methods",
- "methodsview",
- "mex",
- "mexext",
- "mexhost",
- "mfilename",
- "mget",
- "milliseconds",
- "min",
- "mink",
- "minres",
- "minspantree",
- "minute",
- "minutes",
- "mislocked",
- "missing",
- "mkdir",
- "mkpp",
- "mldivide",
- "mlintrpt",
- "mlock",
- "mmfileinfo",
- "mod",
- "mode",
- "month",
- "more",
- "morebins",
- "movAbsHDU",
- "movNamHDU",
- "movRelHDU",
- "move",
- "movefile",
- "movegui",
- "movevars",
- "movie",
- "movmad",
- "movmax",
- "movmean",
- "movmedian",
- "movmin",
- "movprod",
- "movstd",
- "movsum",
- "movvar",
- "mpower",
- "mput",
- "mrdivide",
- "msgbox",
- "mtimes",
- "mu2lin",
- "multibandread",
- "multibandwrite",
- "munlock",
- "mustBeA",
- "mustBeFile",
- "mustBeFinite",
- "mustBeFloat",
- "mustBeFolder",
- "mustBeGreaterThan",
- "mustBeGreaterThanOrEqual",
- "mustBeInRange",
- "mustBeInteger",
- "mustBeLessThan",
- "mustBeLessThanOrEqual",
- "mustBeMember",
- "mustBeNegative",
- "mustBeNonNan",
- "mustBeNonempty",
- "mustBeNonmissing",
- "mustBeNonnegative",
- "mustBeNonpositive",
- "mustBeNonsparse",
- "mustBeNonzero",
- "mustBeNonzeroLengthText",
- "mustBeNumeric",
- "mustBeNumericOrLogical",
- "mustBePositive",
- "mustBeReal",
- "mustBeScalarOrEmpty",
- "mustBeText",
- "mustBeTextScalar",
- "mustBeUnderlyingType",
- "mustBeValidVariableName",
- "mustBeVector",
- "namedPattern",
- "namedargs2cell",
- "namelengthmax",
- "nargin",
- "narginchk",
- "nargout",
- "nargoutchk",
- "native2unicode",
- "nccreate",
- "ncdisp",
- "nchoosek",
- "ncinfo",
- "ncread",
- "ncreadatt",
- "ncwrite",
- "ncwriteatt",
- "ncwriteschema",
- "ndgrid",
- "ndims",
- "nearest",
- "nearestNeighbor",
- "nearestvertex",
- "neighbors",
- "netcdf.abort",
- "netcdf.close",
- "netcdf.copyAtt",
- "netcdf.create",
- "netcdf.defDim",
- "netcdf.defGrp",
- "netcdf.defVar",
- "netcdf.defVarChunking",
- "netcdf.defVarDeflate",
- "netcdf.defVarFill",
- "netcdf.defVarFletcher32",
- "netcdf.delAtt",
- "netcdf.endDef",
- "netcdf.getAtt",
- "netcdf.getChunkCache",
- "netcdf.getConstant",
- "netcdf.getConstantNames",
- "netcdf.getVar",
- "netcdf.inq",
- "netcdf.inqAtt",
- "netcdf.inqAttID",
- "netcdf.inqAttName",
- "netcdf.inqDim",
- "netcdf.inqDimID",
- "netcdf.inqDimIDs",
- "netcdf.inqFormat",
- "netcdf.inqGrpName",
- "netcdf.inqGrpNameFull",
- "netcdf.inqGrpParent",
- "netcdf.inqGrps",
- "netcdf.inqLibVers",
- "netcdf.inqNcid",
- "netcdf.inqUnlimDims",
- "netcdf.inqVar",
- "netcdf.inqVarChunking",
- "netcdf.inqVarDeflate",
- "netcdf.inqVarFill",
- "netcdf.inqVarFletcher32",
- "netcdf.inqVarID",
- "netcdf.inqVarIDs",
- "netcdf.open",
- "netcdf.putAtt",
- "netcdf.putVar",
- "netcdf.reDef",
- "netcdf.renameAtt",
- "netcdf.renameDim",
- "netcdf.renameVar",
- "netcdf.setChunkCache",
- "netcdf.setDefaultFormat",
- "netcdf.setFill",
- "netcdf.sync",
- "newline",
- "newplot",
- "nextpow2",
- "nexttile",
- "nnz",
- "nonzeros",
- "norm",
- "normalize",
- "normest",
- "notify",
- "now",
- "nsidedpoly",
- "nthroot",
- "nufft",
- "nufftn",
- "null",
- "num2cell",
- "num2hex",
- "num2ruler",
- "num2str",
- "numArgumentsFromSubscript",
- "numRegions",
- "numboundaries",
- "numedges",
- "numel",
- "numnodes",
- "numpartitions",
- "numsides",
- "nzmax",
- "ode113",
- "ode15i",
- "ode15s",
- "ode23",
- "ode23s",
- "ode23t",
- "ode23tb",
- "ode45",
- "odeget",
- "odeset",
- "odextend",
- "onCleanup",
- "ones",
- "open",
- "openDiskFile",
- "openFile",
- "openProject",
- "openfig",
- "opengl",
- "openvar",
- "optimget",
- "optimset",
- "optionalPattern",
- "ordeig",
- "orderfields",
- "ordqz",
- "ordschur",
- "orient",
- "orth",
- "outdegree",
- "outedges",
- "outerjoin",
- "overlaps",
- "overlapsrange",
- "pack",
- "pad",
- "padecoef",
- "pagectranspose",
- "pagemtimes",
- "pagetranspose",
- "pan",
- "panInteraction",
- "parallelplot",
- "pareto",
- "parquetDatastore",
- "parquetinfo",
- "parquetread",
- "parquetwrite",
- "partition",
- "parula",
- "pascal",
- "patch",
- "path",
- "pathsep",
- "pathtool",
- "pattern",
- "pause",
- "pbaspect",
- "pcg",
- "pchip",
- "pcode",
- "pcolor",
- "pdepe",
- "pdeval",
- "peaks",
- "perimeter",
- "perl",
- "perms",
- "permute",
- "pi",
- "pie",
- "pie3",
- "pink",
- "pinv",
- "planerot",
- "play",
- "playblocking",
- "plot",
- "plot3",
- "plotbrowser",
- "plotedit",
- "plotmatrix",
- "plottools",
- "plus",
- "pointLocation",
- "pol2cart",
- "polaraxes",
- "polarbubblechart",
- "polarhistogram",
- "polarplot",
- "polarscatter",
- "poly",
- "polyarea",
- "polybuffer",
- "polyder",
- "polyeig",
- "polyfit",
- "polyint",
- "polyshape",
- "polyval",
- "polyvalm",
- "posixtime",
- "possessivePattern",
- "pow2",
- "ppval",
- "predecessors",
- "prefdir",
- "preferences",
- "press",
- "preview",
- "primes",
- "print",
- "printdlg",
- "printopt",
- "printpreview",
- "prism",
- "processInputSpecificationChangeImpl",
- "processTunedPropertiesImpl",
- "prod",
- "profile",
- "propedit",
- "properties",
- "propertyeditor",
- "psi",
- "publish",
- "pwd",
- "pyargs",
- "pyenv",
- "qmr",
- "qr",
- "qrdelete",
- "qrinsert",
- "qrupdate",
- "quad2d",
- "quadgk",
- "quarter",
- "questdlg",
- "quit",
- "quiver",
- "quiver3",
- "qz",
- "rad2deg",
- "rand",
- "randi",
- "randn",
- "randperm",
- "rank",
- "rat",
- "rats",
- "rbbox",
- "rcond",
- "read",
- "readATblHdr",
- "readBTblHdr",
- "readCard",
- "readCol",
- "readFrame",
- "readImg",
- "readKey",
- "readKeyCmplx",
- "readKeyDbl",
- "readKeyLongLong",
- "readKeyLongStr",
- "readKeyUnit",
- "readRecord",
- "readall",
- "readcell",
- "readline",
- "readlines",
- "readmatrix",
- "readstruct",
- "readtable",
- "readtimetable",
- "readvars",
- "real",
- "reallog",
- "realmax",
- "realmin",
- "realpow",
- "realsqrt",
- "record",
- "recordblocking",
- "rectangle",
- "rectint",
- "recycle",
- "reducepatch",
- "reducevolume",
- "refresh",
- "refreshSourceControl",
- "refreshdata",
- "regexp",
- "regexpPattern",
- "regexpi",
- "regexprep",
- "regexptranslate",
- "regionZoomInteraction",
- "regions",
- "registerevent",
- "regmatlabserver",
- "rehash",
- "relationaloperators",
- "release",
- "releaseImpl",
- "reload",
- "rem",
- "remove",
- "removeCategory",
- "removeFile",
- "removeGroup",
- "removeLabel",
- "removePath",
- "removeReference",
- "removeSetting",
- "removeShortcut",
- "removeShutdownFile",
- "removeStartupFile",
- "removeStyle",
- "removeToolbarExplorationButtons",
- "removecats",
- "removets",
- "removevars",
- "rename",
- "renamecats",
- "renamevars",
- "rendererinfo",
- "reordercats",
- "reordernodes",
- "repelem",
- "replace",
- "replaceBetween",
- "repmat",
- "resample",
- "rescale",
- "reset",
- "resetImpl",
- "reshape",
- "residue",
- "restoredefaultpath",
- "resume",
- "rethrow",
- "retime",
- "reverse",
- "rgb2gray",
- "rgb2hsv",
- "rgb2ind",
- "rgbplot",
- "ribbon",
- "rlim",
- "rmappdata",
- "rmboundary",
- "rmdir",
- "rmedge",
- "rmfield",
- "rmholes",
- "rmmissing",
- "rmnode",
- "rmoutliers",
- "rmpath",
- "rmpref",
- "rmprop",
- "rmslivers",
- "rng",
- "roots",
- "rosser",
- "rot90",
- "rotate",
- "rotate3d",
- "rotateInteraction",
- "round",
- "rowfun",
- "rows2vars",
- "rref",
- "rsf2csf",
- "rtickangle",
- "rtickformat",
- "rticklabels",
- "rticks",
- "ruler2num",
- "rulerPanInteraction",
- "run",
- "runChecks",
- "runperf",
- "runtests",
- "save",
- "saveObjectImpl",
- "saveas",
- "savefig",
- "saveobj",
- "savepath",
- "scale",
- "scatter",
- "scatter3",
- "scatteredInterpolant",
- "scatterhistogram",
- "schur",
- "scroll",
- "sec",
- "secd",
- "sech",
- "second",
- "seconds",
- "semilogx",
- "semilogy",
- "sendmail",
- "serialport",
- "serialportlist",
- "set",
- "setBscale",
- "setCompressionType",
- "setDTR",
- "setHCompScale",
- "setHCompSmooth",
- "setProperties",
- "setRTS",
- "setTileDim",
- "setTscale",
- "setabstime",
- "setappdata",
- "setcats",
- "setdiff",
- "setenv",
- "setfield",
- "setinterpmethod",
- "setpixelposition",
- "setpref",
- "settimeseriesnames",
- "settings",
- "setuniformtime",
- "setup",
- "setupImpl",
- "setvaropts",
- "setvartype",
- "setxor",
- "sgtitle",
- "shading",
- "sheetnames",
- "shg",
- "shiftdim",
- "shortestpath",
- "shortestpathtree",
- "showplottool",
- "shrinkfaces",
- "shuffle",
- "sign",
- "simplify",
- "sin",
- "sind",
- "single",
- "sinh",
- "sinpi",
- "size",
- "slice",
- "smooth3",
- "smoothdata",
- "snapnow",
- "sort",
- "sortboundaries",
- "sortregions",
- "sortrows",
- "sortx",
- "sorty",
- "sound",
- "soundsc",
- "spalloc",
- "sparse",
- "spaugment",
- "spconvert",
- "spdiags",
- "specular",
- "speye",
- "spfun",
- "sph2cart",
- "sphere",
- "spinmap",
- "spline",
- "split",
- "splitapply",
- "splitlines",
- "splitvars",
- "spones",
- "spparms",
- "sprand",
- "sprandn",
- "sprandsym",
- "sprank",
- "spreadsheetDatastore",
- "spreadsheetImportOptions",
- "spring",
- "sprintf",
- "spy",
- "sqrt",
- "sqrtm",
- "squeeze",
- "ss2tf",
- "sscanf",
- "stack",
- "stackedplot",
- "stairs",
- "standardizeMissing",
- "start",
- "startat",
- "startsWith",
- "startup",
- "std",
- "stem",
- "stem3",
- "step",
- "stepImpl",
- "stlread",
- "stlwrite",
- "stop",
- "str2double",
- "str2func",
- "str2num",
- "strcat",
- "strcmp",
- "strcmpi",
- "stream2",
- "stream3",
- "streamline",
- "streamparticles",
- "streamribbon",
- "streamslice",
- "streamtube",
- "strfind",
- "string",
- "strings",
- "strip",
- "strjoin",
- "strjust",
- "strlength",
- "strncmp",
- "strncmpi",
- "strrep",
- "strsplit",
- "strtok",
- "strtrim",
- "struct",
- "struct2cell",
- "struct2table",
- "structfun",
- "sub2ind",
- "subgraph",
- "subplot",
- "subsasgn",
- "subscribe",
- "subsindex",
- "subspace",
- "subsref",
- "substruct",
- "subtitle",
- "subtract",
- "subvolume",
- "successors",
- "sum",
- "summary",
- "summer",
- "superclasses",
- "surf",
- "surf2patch",
- "surface",
- "surfaceArea",
- "surfc",
- "surfl",
- "surfnorm",
- "svd",
- "svds",
- "svdsketch",
- "swapbytes",
- "swarmchart",
- "swarmchart3",
- "sylvester",
- "symamd",
- "symbfact",
- "symmlq",
- "symrcm",
- "synchronize",
- "sysobjupdate",
- "system",
- "table",
- "table2array",
- "table2cell",
- "table2struct",
- "table2timetable",
- "tabularTextDatastore",
- "tail",
- "tall",
- "tallrng",
- "tan",
- "tand",
- "tanh",
- "tar",
- "tcpclient",
- "tempdir",
- "tempname",
- "testsuite",
- "tetramesh",
- "texlabel",
- "text",
- "textBoundary",
- "textscan",
- "textwrap",
- "tfqmr",
- "thetalim",
- "thetatickformat",
- "thetaticklabels",
- "thetaticks",
- "thingSpeakRead",
- "thingSpeakWrite",
- "throw",
- "throwAsCaller",
- "tic",
- "tiledlayout",
- "time",
- "timeit",
- "timeofday",
- "timer",
- "timerange",
- "timerfind",
- "timerfindall",
- "timeseries",
- "timetable",
- "timetable2table",
- "timezones",
- "title",
- "toc",
- "todatenum",
- "toeplitz",
- "toolboxdir",
- "topkrows",
- "toposort",
- "trace",
- "transclosure",
- "transform",
- "translate",
- "transpose",
- "transreduction",
- "trapz",
- "treelayout",
- "treeplot",
- "triangulation",
- "tril",
- "trimesh",
- "triplot",
- "trisurf",
- "triu",
- "true",
- "tscollection",
- "tsdata.event",
- "tsearchn",
- "turbo",
- "turningdist",
- "type",
- "typecast",
- "tzoffset",
- "uialert",
- "uiaxes",
- "uibutton",
- "uibuttongroup",
- "uicheckbox",
- "uiconfirm",
- "uicontextmenu",
- "uicontrol",
- "uidatepicker",
- "uidropdown",
- "uieditfield",
- "uifigure",
- "uigauge",
- "uigetdir",
- "uigetfile",
- "uigetpref",
- "uigridlayout",
- "uihtml",
- "uiimage",
- "uiknob",
- "uilabel",
- "uilamp",
- "uilistbox",
- "uimenu",
- "uint16",
- "uint32",
- "uint64",
- "uint8",
- "uiopen",
- "uipanel",
- "uiprogressdlg",
- "uipushtool",
- "uiputfile",
- "uiradiobutton",
- "uiresume",
- "uisave",
- "uisetcolor",
- "uisetfont",
- "uisetpref",
- "uislider",
- "uispinner",
- "uistack",
- "uistyle",
- "uiswitch",
- "uitab",
- "uitabgroup",
- "uitable",
- "uitextarea",
- "uitogglebutton",
- "uitoggletool",
- "uitoolbar",
- "uitree",
- "uitreenode",
- "uiwait",
- "uminus",
- "underlyingType",
- "underlyingValue",
- "unicode2native",
- "union",
- "unique",
- "uniquetol",
- "unix",
- "unloadlibrary",
- "unmesh",
- "unmkpp",
- "unregisterallevents",
- "unregisterevent",
- "unstack",
- "unsubscribe",
- "untar",
- "unwrap",
- "unzip",
- "update",
- "updateDependencies",
- "uplus",
- "upper",
- "usejava",
- "userpath",
- "validateFunctionSignaturesJSON",
- "validateInputsImpl",
- "validatePropertiesImpl",
- "validateattributes",
- "validatecolor",
- "validatestring",
- "values",
- "vander",
- "var",
- "varargin",
- "varargout",
- "varfun",
- "vartype",
- "vecnorm",
- "ver",
- "verLessThan",
- "version",
- "vertcat",
- "vertexAttachments",
- "vertexNormal",
- "view",
- "viewmtx",
- "visdiff",
- "volume",
- "volumebounds",
- "voronoi",
- "voronoiDiagram",
- "voronoin",
- "wait",
- "waitbar",
- "waitfor",
- "waitforbuttonpress",
- "warndlg",
- "warning",
- "waterfall",
- "web",
- "weboptions",
- "webread",
- "websave",
- "webwrite",
- "week",
- "weekday",
- "what",
- "which",
- "whitespaceBoundary",
- "whitespacePattern",
- "who",
- "whos",
- "width",
- "wildcardPattern",
- "wilkinson",
- "winopen",
- "winqueryreg",
- "winter",
- "withinrange",
- "withtol",
- "wordcloud",
- "write",
- "writeChecksum",
- "writeCol",
- "writeComment",
- "writeDate",
- "writeHistory",
- "writeImg",
- "writeKey",
- "writeKeyUnit",
- "writeVideo",
- "writeall",
- "writecell",
- "writeline",
- "writematrix",
- "writestruct",
- "writetable",
- "writetimetable",
- "xcorr",
- "xcov",
- "xlabel",
- "xlim",
- "xline",
- "xmlread",
- "xmlwrite",
- "xor",
- "xslt",
- "xtickangle",
- "xtickformat",
- "xticklabels",
- "xticks",
- "year",
- "years",
- "ylabel",
- "ylim",
- "yline",
- "ymd",
- "ytickangle",
- "ytickformat",
- "yticklabels",
- "yticks",
- "yyaxis",
- "yyyymmdd",
- "zeros",
- "zip",
- "zlabel",
- "zlim",
- "zoom",
- "zoomInteraction",
- "ztickangle",
- "ztickformat",
- "zticklabels",
- "zticks",
- ],
- prefix=r"(?<!\.)(", # Exclude field names
- suffix=r")\b"
- ),
- Name.Builtin
- ),
+ (r'(\s*^\s*)(function)\b', bygroups(Whitespace, Keyword), 'deffunc'),
+ (r'(\s*^\s*)(properties)(\s+)(\()',
+ bygroups(Whitespace, Keyword, Whitespace, Punctuation),
+ ('defprops', 'propattrs')),
+ (r'(\s*^\s*)(properties)\b',
+ bygroups(Whitespace, Keyword), 'defprops'),
+
+ # from 'iskeyword' on version 9.4 (R2018a):
+ # Check that there is no preceding dot, as keywords are valid field
+ # names.
+ (words(('break', 'case', 'catch', 'classdef', 'continue',
+ 'dynamicprops', 'else', 'elseif', 'end', 'for', 'function',
+ 'global', 'if', 'methods', 'otherwise', 'parfor',
+ 'persistent', 'return', 'spmd', 'switch',
+ 'try', 'while'),
+ prefix=r'(?<!\.)(\s*)(', suffix=r')\b'),
+ bygroups(Whitespace, Keyword)),
+
+ (
+ words(
+ [
+ # See https://mathworks.com/help/matlab/referencelist.html
+ # Below data from 2021-02-10T18:24:08Z
+ # for Matlab release R2020b
+ "BeginInvoke",
+ "COM",
+ "Combine",
+ "CombinedDatastore",
+ "EndInvoke",
+ "Execute",
+ "FactoryGroup",
+ "FactorySetting",
+ "Feval",
+ "FunctionTestCase",
+ "GetCharArray",
+ "GetFullMatrix",
+ "GetVariable",
+ "GetWorkspaceData",
+ "GraphPlot",
+ "H5.close",
+ "H5.garbage_collect",
+ "H5.get_libversion",
+ "H5.open",
+ "H5.set_free_list_limits",
+ "H5A.close",
+ "H5A.create",
+ "H5A.delete",
+ "H5A.get_info",
+ "H5A.get_name",
+ "H5A.get_space",
+ "H5A.get_type",
+ "H5A.iterate",
+ "H5A.open",
+ "H5A.open_by_idx",
+ "H5A.open_by_name",
+ "H5A.read",
+ "H5A.write",
+ "H5D.close",
+ "H5D.create",
+ "H5D.get_access_plist",
+ "H5D.get_create_plist",
+ "H5D.get_offset",
+ "H5D.get_space",
+ "H5D.get_space_status",
+ "H5D.get_storage_size",
+ "H5D.get_type",
+ "H5D.open",
+ "H5D.read",
+ "H5D.set_extent",
+ "H5D.vlen_get_buf_size",
+ "H5D.write",
+ "H5DS.attach_scale",
+ "H5DS.detach_scale",
+ "H5DS.get_label",
+ "H5DS.get_num_scales",
+ "H5DS.get_scale_name",
+ "H5DS.is_scale",
+ "H5DS.iterate_scales",
+ "H5DS.set_label",
+ "H5DS.set_scale",
+ "H5E.clear",
+ "H5E.get_major",
+ "H5E.get_minor",
+ "H5E.walk",
+ "H5F.close",
+ "H5F.create",
+ "H5F.flush",
+ "H5F.get_access_plist",
+ "H5F.get_create_plist",
+ "H5F.get_filesize",
+ "H5F.get_freespace",
+ "H5F.get_info",
+ "H5F.get_mdc_config",
+ "H5F.get_mdc_hit_rate",
+ "H5F.get_mdc_size",
+ "H5F.get_name",
+ "H5F.get_obj_count",
+ "H5F.get_obj_ids",
+ "H5F.is_hdf5",
+ "H5F.mount",
+ "H5F.open",
+ "H5F.reopen",
+ "H5F.set_mdc_config",
+ "H5F.unmount",
+ "H5G.close",
+ "H5G.create",
+ "H5G.get_info",
+ "H5G.open",
+ "H5I.dec_ref",
+ "H5I.get_file_id",
+ "H5I.get_name",
+ "H5I.get_ref",
+ "H5I.get_type",
+ "H5I.inc_ref",
+ "H5I.is_valid",
+ "H5L.copy",
+ "H5L.create_external",
+ "H5L.create_hard",
+ "H5L.create_soft",
+ "H5L.delete",
+ "H5L.exists",
+ "H5L.get_info",
+ "H5L.get_name_by_idx",
+ "H5L.get_val",
+ "H5L.iterate",
+ "H5L.iterate_by_name",
+ "H5L.move",
+ "H5L.visit",
+ "H5L.visit_by_name",
+ "H5ML.compare_values",
+ "H5ML.get_constant_names",
+ "H5ML.get_constant_value",
+ "H5ML.get_function_names",
+ "H5ML.get_mem_datatype",
+ "H5O.close",
+ "H5O.copy",
+ "H5O.get_comment",
+ "H5O.get_comment_by_name",
+ "H5O.get_info",
+ "H5O.link",
+ "H5O.open",
+ "H5O.open_by_idx",
+ "H5O.set_comment",
+ "H5O.set_comment_by_name",
+ "H5O.visit",
+ "H5O.visit_by_name",
+ "H5P.all_filters_avail",
+ "H5P.close",
+ "H5P.close_class",
+ "H5P.copy",
+ "H5P.create",
+ "H5P.equal",
+ "H5P.exist",
+ "H5P.fill_value_defined",
+ "H5P.get",
+ "H5P.get_alignment",
+ "H5P.get_alloc_time",
+ "H5P.get_attr_creation_order",
+ "H5P.get_attr_phase_change",
+ "H5P.get_btree_ratios",
+ "H5P.get_char_encoding",
+ "H5P.get_chunk",
+ "H5P.get_chunk_cache",
+ "H5P.get_class",
+ "H5P.get_class_name",
+ "H5P.get_class_parent",
+ "H5P.get_copy_object",
+ "H5P.get_create_intermediate_group",
+ "H5P.get_driver",
+ "H5P.get_edc_check",
+ "H5P.get_external",
+ "H5P.get_external_count",
+ "H5P.get_family_offset",
+ "H5P.get_fapl_core",
+ "H5P.get_fapl_family",
+ "H5P.get_fapl_multi",
+ "H5P.get_fclose_degree",
+ "H5P.get_fill_time",
+ "H5P.get_fill_value",
+ "H5P.get_filter",
+ "H5P.get_filter_by_id",
+ "H5P.get_gc_references",
+ "H5P.get_hyper_vector_size",
+ "H5P.get_istore_k",
+ "H5P.get_layout",
+ "H5P.get_libver_bounds",
+ "H5P.get_link_creation_order",
+ "H5P.get_link_phase_change",
+ "H5P.get_mdc_config",
+ "H5P.get_meta_block_size",
+ "H5P.get_multi_type",
+ "H5P.get_nfilters",
+ "H5P.get_nprops",
+ "H5P.get_sieve_buf_size",
+ "H5P.get_size",
+ "H5P.get_sizes",
+ "H5P.get_small_data_block_size",
+ "H5P.get_sym_k",
+ "H5P.get_userblock",
+ "H5P.get_version",
+ "H5P.isa_class",
+ "H5P.iterate",
+ "H5P.modify_filter",
+ "H5P.remove_filter",
+ "H5P.set",
+ "H5P.set_alignment",
+ "H5P.set_alloc_time",
+ "H5P.set_attr_creation_order",
+ "H5P.set_attr_phase_change",
+ "H5P.set_btree_ratios",
+ "H5P.set_char_encoding",
+ "H5P.set_chunk",
+ "H5P.set_chunk_cache",
+ "H5P.set_copy_object",
+ "H5P.set_create_intermediate_group",
+ "H5P.set_deflate",
+ "H5P.set_edc_check",
+ "H5P.set_external",
+ "H5P.set_family_offset",
+ "H5P.set_fapl_core",
+ "H5P.set_fapl_family",
+ "H5P.set_fapl_log",
+ "H5P.set_fapl_multi",
+ "H5P.set_fapl_sec2",
+ "H5P.set_fapl_split",
+ "H5P.set_fapl_stdio",
+ "H5P.set_fclose_degree",
+ "H5P.set_fill_time",
+ "H5P.set_fill_value",
+ "H5P.set_filter",
+ "H5P.set_fletcher32",
+ "H5P.set_gc_references",
+ "H5P.set_hyper_vector_size",
+ "H5P.set_istore_k",
+ "H5P.set_layout",
+ "H5P.set_libver_bounds",
+ "H5P.set_link_creation_order",
+ "H5P.set_link_phase_change",
+ "H5P.set_mdc_config",
+ "H5P.set_meta_block_size",
+ "H5P.set_multi_type",
+ "H5P.set_nbit",
+ "H5P.set_scaleoffset",
+ "H5P.set_shuffle",
+ "H5P.set_sieve_buf_size",
+ "H5P.set_sizes",
+ "H5P.set_small_data_block_size",
+ "H5P.set_sym_k",
+ "H5P.set_userblock",
+ "H5R.create",
+ "H5R.dereference",
+ "H5R.get_name",
+ "H5R.get_obj_type",
+ "H5R.get_region",
+ "H5S.close",
+ "H5S.copy",
+ "H5S.create",
+ "H5S.create_simple",
+ "H5S.extent_copy",
+ "H5S.get_select_bounds",
+ "H5S.get_select_elem_npoints",
+ "H5S.get_select_elem_pointlist",
+ "H5S.get_select_hyper_blocklist",
+ "H5S.get_select_hyper_nblocks",
+ "H5S.get_select_npoints",
+ "H5S.get_select_type",
+ "H5S.get_simple_extent_dims",
+ "H5S.get_simple_extent_ndims",
+ "H5S.get_simple_extent_npoints",
+ "H5S.get_simple_extent_type",
+ "H5S.is_simple",
+ "H5S.offset_simple",
+ "H5S.select_all",
+ "H5S.select_elements",
+ "H5S.select_hyperslab",
+ "H5S.select_none",
+ "H5S.select_valid",
+ "H5S.set_extent_none",
+ "H5S.set_extent_simple",
+ "H5T.array_create",
+ "H5T.close",
+ "H5T.commit",
+ "H5T.committed",
+ "H5T.copy",
+ "H5T.create",
+ "H5T.detect_class",
+ "H5T.enum_create",
+ "H5T.enum_insert",
+ "H5T.enum_nameof",
+ "H5T.enum_valueof",
+ "H5T.equal",
+ "H5T.get_array_dims",
+ "H5T.get_array_ndims",
+ "H5T.get_class",
+ "H5T.get_create_plist",
+ "H5T.get_cset",
+ "H5T.get_ebias",
+ "H5T.get_fields",
+ "H5T.get_inpad",
+ "H5T.get_member_class",
+ "H5T.get_member_index",
+ "H5T.get_member_name",
+ "H5T.get_member_offset",
+ "H5T.get_member_type",
+ "H5T.get_member_value",
+ "H5T.get_native_type",
+ "H5T.get_nmembers",
+ "H5T.get_norm",
+ "H5T.get_offset",
+ "H5T.get_order",
+ "H5T.get_pad",
+ "H5T.get_precision",
+ "H5T.get_sign",
+ "H5T.get_size",
+ "H5T.get_strpad",
+ "H5T.get_super",
+ "H5T.get_tag",
+ "H5T.insert",
+ "H5T.is_variable_str",
+ "H5T.lock",
+ "H5T.open",
+ "H5T.pack",
+ "H5T.set_cset",
+ "H5T.set_ebias",
+ "H5T.set_fields",
+ "H5T.set_inpad",
+ "H5T.set_norm",
+ "H5T.set_offset",
+ "H5T.set_order",
+ "H5T.set_pad",
+ "H5T.set_precision",
+ "H5T.set_sign",
+ "H5T.set_size",
+ "H5T.set_strpad",
+ "H5T.set_tag",
+ "H5T.vlen_create",
+ "H5Z.filter_avail",
+ "H5Z.get_filter_info",
+ "Inf",
+ "KeyValueDatastore",
+ "KeyValueStore",
+ "MException",
+ "MException.last",
+ "MaximizeCommandWindow",
+ "MemoizedFunction",
+ "MinimizeCommandWindow",
+ "NET",
+ "NET.Assembly",
+ "NET.GenericClass",
+ "NET.NetException",
+ "NET.addAssembly",
+ "NET.convertArray",
+ "NET.createArray",
+ "NET.createGeneric",
+ "NET.disableAutoRelease",
+ "NET.enableAutoRelease",
+ "NET.invokeGenericMethod",
+ "NET.isNETSupported",
+ "NET.setStaticProperty",
+ "NaN",
+ "NaT",
+ "OperationResult",
+ "PutCharArray",
+ "PutFullMatrix",
+ "PutWorkspaceData",
+ "PythonEnvironment",
+ "Quit",
+ "RandStream",
+ "ReleaseCompatibilityException",
+ "ReleaseCompatibilityResults",
+ "Remove",
+ "RemoveAll",
+ "Setting",
+ "SettingsGroup",
+ "TallDatastore",
+ "Test",
+ "TestResult",
+ "Tiff",
+ "TransformedDatastore",
+ "ValueIterator",
+ "VersionResults",
+ "VideoReader",
+ "VideoWriter",
+ "abs",
+ "accumarray",
+ "acos",
+ "acosd",
+ "acosh",
+ "acot",
+ "acotd",
+ "acoth",
+ "acsc",
+ "acscd",
+ "acsch",
+ "actxGetRunningServer",
+ "actxserver",
+ "add",
+ "addCause",
+ "addCorrection",
+ "addFile",
+ "addFolderIncludingChildFiles",
+ "addGroup",
+ "addLabel",
+ "addPath",
+ "addReference",
+ "addSetting",
+ "addShortcut",
+ "addShutdownFile",
+ "addStartupFile",
+ "addStyle",
+ "addToolbarExplorationButtons",
+ "addboundary",
+ "addcats",
+ "addedge",
+ "addevent",
+ "addlistener",
+ "addmulti",
+ "addnode",
+ "addpath",
+ "addpoints",
+ "addpref",
+ "addprop",
+ "addsample",
+ "addsampletocollection",
+ "addtodate",
+ "addts",
+ "addvars",
+ "adjacency",
+ "airy",
+ "align",
+ "alim",
+ "all",
+ "allchild",
+ "alpha",
+ "alphaShape",
+ "alphaSpectrum",
+ "alphaTriangulation",
+ "alphamap",
+ "alphanumericBoundary",
+ "alphanumericsPattern",
+ "amd",
+ "analyzeCodeCompatibility",
+ "ancestor",
+ "angle",
+ "animatedline",
+ "annotation",
+ "ans",
+ "any",
+ "appdesigner",
+ "append",
+ "area",
+ "arguments",
+ "array2table",
+ "array2timetable",
+ "arrayDatastore",
+ "arrayfun",
+ "asFewOfPattern",
+ "asManyOfPattern",
+ "ascii",
+ "asec",
+ "asecd",
+ "asech",
+ "asin",
+ "asind",
+ "asinh",
+ "assert",
+ "assignin",
+ "atan",
+ "atan2",
+ "atan2d",
+ "atand",
+ "atanh",
+ "audiodevinfo",
+ "audiodevreset",
+ "audioinfo",
+ "audioplayer",
+ "audioread",
+ "audiorecorder",
+ "audiowrite",
+ "autumn",
+ "axes",
+ "axis",
+ "axtoolbar",
+ "axtoolbarbtn",
+ "balance",
+ "bandwidth",
+ "bar",
+ "bar3",
+ "bar3h",
+ "barh",
+ "barycentricToCartesian",
+ "base2dec",
+ "batchStartupOptionUsed",
+ "bctree",
+ "beep",
+ "bench",
+ "besselh",
+ "besseli",
+ "besselj",
+ "besselk",
+ "bessely",
+ "beta",
+ "betainc",
+ "betaincinv",
+ "betaln",
+ "between",
+ "bfsearch",
+ "bicg",
+ "bicgstab",
+ "bicgstabl",
+ "biconncomp",
+ "bin2dec",
+ "binary",
+ "binscatter",
+ "bitand",
+ "bitcmp",
+ "bitget",
+ "bitnot",
+ "bitor",
+ "bitset",
+ "bitshift",
+ "bitxor",
+ "blanks",
+ "ble",
+ "blelist",
+ "blkdiag",
+ "bluetooth",
+ "bluetoothlist",
+ "bone",
+ "boundary",
+ "boundaryFacets",
+ "boundaryshape",
+ "boundingbox",
+ "bounds",
+ "box",
+ "boxchart",
+ "brighten",
+ "brush",
+ "bsxfun",
+ "bubblechart",
+ "bubblechart3",
+ "bubblelegend",
+ "bubblelim",
+ "bubblesize",
+ "builddocsearchdb",
+ "builtin",
+ "bvp4c",
+ "bvp5c",
+ "bvpget",
+ "bvpinit",
+ "bvpset",
+ "bvpxtend",
+ "caldays",
+ "caldiff",
+ "calendar",
+ "calendarDuration",
+ "calllib",
+ "calmonths",
+ "calquarters",
+ "calweeks",
+ "calyears",
+ "camdolly",
+ "cameratoolbar",
+ "camlight",
+ "camlookat",
+ "camorbit",
+ "campan",
+ "campos",
+ "camproj",
+ "camroll",
+ "camtarget",
+ "camup",
+ "camva",
+ "camzoom",
+ "canUseGPU",
+ "canUseParallelPool",
+ "cart2pol",
+ "cart2sph",
+ "cartesianToBarycentric",
+ "caseInsensitivePattern",
+ "caseSensitivePattern",
+ "cast",
+ "cat",
+ "categorical",
+ "categories",
+ "caxis",
+ "cd",
+ "cdf2rdf",
+ "cdfepoch",
+ "cdfinfo",
+ "cdflib",
+ "cdfread",
+ "ceil",
+ "cell",
+ "cell2mat",
+ "cell2struct",
+ "cell2table",
+ "celldisp",
+ "cellfun",
+ "cellplot",
+ "cellstr",
+ "centrality",
+ "centroid",
+ "cgs",
+ "char",
+ "characterListPattern",
+ "characteristic",
+ "checkcode",
+ "chol",
+ "cholupdate",
+ "choose",
+ "chooseContextMenu",
+ "circshift",
+ "circumcenter",
+ "cla",
+ "clabel",
+ "class",
+ "classUnderlying",
+ "clc",
+ "clear",
+ "clearAllMemoizedCaches",
+ "clearPersonalValue",
+ "clearTemporaryValue",
+ "clearpoints",
+ "clearvars",
+ "clf",
+ "clibArray",
+ "clibConvertArray",
+ "clibIsNull",
+ "clibIsReadOnly",
+ "clibRelease",
+ "clibgen.buildInterface",
+ "clibgen.generateLibraryDefinition",
+ "clipboard",
+ "clock",
+ "clone",
+ "close",
+ "closeFile",
+ "closereq",
+ "cmap2gray",
+ "cmpermute",
+ "cmunique",
+ "codeCompatibilityReport",
+ "colamd",
+ "collapse",
+ "colon",
+ "colorbar",
+ "colorcube",
+ "colormap",
+ "colororder",
+ "colperm",
+ "com.mathworks.engine.MatlabEngine",
+ "com.mathworks.matlab.types.CellStr",
+ "com.mathworks.matlab.types.Complex",
+ "com.mathworks.matlab.types.HandleObject",
+ "com.mathworks.matlab.types.Struct",
+ "combine",
+ "comet",
+ "comet3",
+ "compan",
+ "compass",
+ "complex",
+ "compose",
+ "computer",
+ "comserver",
+ "cond",
+ "condeig",
+ "condensation",
+ "condest",
+ "coneplot",
+ "configureCallback",
+ "configureTerminator",
+ "conj",
+ "conncomp",
+ "containers.Map",
+ "contains",
+ "containsrange",
+ "contour",
+ "contour3",
+ "contourc",
+ "contourf",
+ "contourslice",
+ "contrast",
+ "conv",
+ "conv2",
+ "convertCharsToStrings",
+ "convertContainedStringsToChars",
+ "convertStringsToChars",
+ "convertTo",
+ "convertvars",
+ "convexHull",
+ "convhull",
+ "convhulln",
+ "convn",
+ "cool",
+ "copper",
+ "copyHDU",
+ "copyfile",
+ "copygraphics",
+ "copyobj",
+ "corrcoef",
+ "cos",
+ "cosd",
+ "cosh",
+ "cospi",
+ "cot",
+ "cotd",
+ "coth",
+ "count",
+ "countcats",
+ "cov",
+ "cplxpair",
+ "cputime",
+ "createCategory",
+ "createFile",
+ "createImg",
+ "createLabel",
+ "createTbl",
+ "criticalAlpha",
+ "cross",
+ "csc",
+ "cscd",
+ "csch",
+ "ctranspose",
+ "cummax",
+ "cummin",
+ "cumprod",
+ "cumsum",
+ "cumtrapz",
+ "curl",
+ "currentProject",
+ "cylinder",
+ "daspect",
+ "dataTipInteraction",
+ "dataTipTextRow",
+ "datacursormode",
+ "datastore",
+ "datatip",
+ "date",
+ "datenum",
+ "dateshift",
+ "datestr",
+ "datetick",
+ "datetime",
+ "datevec",
+ "day",
+ "days",
+ "dbclear",
+ "dbcont",
+ "dbdown",
+ "dbmex",
+ "dbquit",
+ "dbstack",
+ "dbstatus",
+ "dbstep",
+ "dbstop",
+ "dbtype",
+ "dbup",
+ "dde23",
+ "ddeget",
+ "ddensd",
+ "ddesd",
+ "ddeset",
+ "deblank",
+ "dec2base",
+ "dec2bin",
+ "dec2hex",
+ "decic",
+ "decomposition",
+ "deconv",
+ "deg2rad",
+ "degree",
+ "del2",
+ "delaunay",
+ "delaunayTriangulation",
+ "delaunayn",
+ "delete",
+ "deleteCol",
+ "deleteFile",
+ "deleteHDU",
+ "deleteKey",
+ "deleteRecord",
+ "deleteRows",
+ "delevent",
+ "delimitedTextImportOptions",
+ "delsample",
+ "delsamplefromcollection",
+ "demo",
+ "descriptor",
+ "det",
+ "details",
+ "detectImportOptions",
+ "detrend",
+ "deval",
+ "dfsearch",
+ "diag",
+ "dialog",
+ "diary",
+ "diff",
+ "diffuse",
+ "digitBoundary",
+ "digitsPattern",
+ "digraph",
+ "dir",
+ "disableDefaultInteractivity",
+ "discretize",
+ "disp",
+ "display",
+ "dissect",
+ "distances",
+ "dither",
+ "divergence",
+ "dmperm",
+ "doc",
+ "docsearch",
+ "dos",
+ "dot",
+ "double",
+ "drag",
+ "dragrect",
+ "drawnow",
+ "dsearchn",
+ "duration",
+ "dynamicprops",
+ "echo",
+ "echodemo",
+ "echotcpip",
+ "edgeAttachments",
+ "edgecount",
+ "edges",
+ "edit",
+ "eig",
+ "eigs",
+ "ellipj",
+ "ellipke",
+ "ellipsoid",
+ "empty",
+ "enableDefaultInteractivity",
+ "enableLegacyExplorationModes",
+ "enableNETfromNetworkDrive",
+ "enableservice",
+ "endsWith",
+ "enumeration",
+ "eomday",
+ "eps",
+ "eq",
+ "equilibrate",
+ "erase",
+ "eraseBetween",
+ "erf",
+ "erfc",
+ "erfcinv",
+ "erfcx",
+ "erfinv",
+ "error",
+ "errorbar",
+ "errordlg",
+ "etime",
+ "etree",
+ "etreeplot",
+ "eval",
+ "evalc",
+ "evalin",
+ "event.ClassInstanceEvent",
+ "event.DynamicPropertyEvent",
+ "event.EventData",
+ "event.PropertyEvent",
+ "event.hasListener",
+ "event.listener",
+ "event.proplistener",
+ "eventlisteners",
+ "events",
+ "exceltime",
+ "exist",
+ "exit",
+ "exp",
+ "expand",
+ "expint",
+ "expm",
+ "expm1",
+ "export",
+ "export2wsdlg",
+ "exportapp",
+ "exportgraphics",
+ "exportsetupdlg",
+ "extract",
+ "extractAfter",
+ "extractBefore",
+ "extractBetween",
+ "eye",
+ "ezpolar",
+ "faceNormal",
+ "factor",
+ "factorial",
+ "false",
+ "fclose",
+ "fcontour",
+ "feather",
+ "featureEdges",
+ "feof",
+ "ferror",
+ "feval",
+ "fewerbins",
+ "fft",
+ "fft2",
+ "fftn",
+ "fftshift",
+ "fftw",
+ "fgetl",
+ "fgets",
+ "fieldnames",
+ "figure",
+ "figurepalette",
+ "fileDatastore",
+ "fileMode",
+ "fileName",
+ "fileattrib",
+ "filemarker",
+ "fileparts",
+ "fileread",
+ "filesep",
+ "fill",
+ "fill3",
+ "fillmissing",
+ "filloutliers",
+ "filter",
+ "filter2",
+ "fimplicit",
+ "fimplicit3",
+ "find",
+ "findCategory",
+ "findEvent",
+ "findFile",
+ "findLabel",
+ "findall",
+ "findedge",
+ "findfigs",
+ "findgroups",
+ "findnode",
+ "findobj",
+ "findprop",
+ "finish",
+ "fitsdisp",
+ "fitsinfo",
+ "fitsread",
+ "fitswrite",
+ "fix",
+ "fixedWidthImportOptions",
+ "flag",
+ "flintmax",
+ "flip",
+ "flipedge",
+ "fliplr",
+ "flipud",
+ "floor",
+ "flow",
+ "flush",
+ "fmesh",
+ "fminbnd",
+ "fminsearch",
+ "fopen",
+ "format",
+ "fplot",
+ "fplot3",
+ "fprintf",
+ "frame2im",
+ "fread",
+ "freeBoundary",
+ "freqspace",
+ "frewind",
+ "fscanf",
+ "fseek",
+ "fsurf",
+ "ftell",
+ "ftp",
+ "full",
+ "fullfile",
+ "func2str",
+ "function_handle",
+ "functions",
+ "functiontests",
+ "funm",
+ "fwrite",
+ "fzero",
+ "gallery",
+ "gamma",
+ "gammainc",
+ "gammaincinv",
+ "gammaln",
+ "gather",
+ "gca",
+ "gcbf",
+ "gcbo",
+ "gcd",
+ "gcf",
+ "gcmr",
+ "gco",
+ "genpath",
+ "geoaxes",
+ "geobasemap",
+ "geobubble",
+ "geodensityplot",
+ "geolimits",
+ "geoplot",
+ "geoscatter",
+ "geotickformat",
+ "get",
+ "getAColParms",
+ "getAxes",
+ "getBColParms",
+ "getColName",
+ "getColType",
+ "getColorbar",
+ "getConstantValue",
+ "getEqColType",
+ "getFileFormats",
+ "getHDUnum",
+ "getHDUtype",
+ "getHdrSpace",
+ "getImgSize",
+ "getImgType",
+ "getLayout",
+ "getLegend",
+ "getMockHistory",
+ "getNumCols",
+ "getNumHDUs",
+ "getNumInputs",
+ "getNumInputsImpl",
+ "getNumOutputs",
+ "getNumOutputsImpl",
+ "getNumRows",
+ "getOpenFiles",
+ "getProfiles",
+ "getPropertyGroupsImpl",
+ "getReport",
+ "getTimeStr",
+ "getVersion",
+ "getabstime",
+ "getappdata",
+ "getaudiodata",
+ "getdatasamples",
+ "getdatasamplesize",
+ "getenv",
+ "getfield",
+ "getframe",
+ "getinterpmethod",
+ "getnext",
+ "getpinstatus",
+ "getpixelposition",
+ "getplayer",
+ "getpoints",
+ "getpref",
+ "getqualitydesc",
+ "getrangefromclass",
+ "getsamples",
+ "getsampleusingtime",
+ "gettimeseriesnames",
+ "gettsafteratevent",
+ "gettsafterevent",
+ "gettsatevent",
+ "gettsbeforeatevent",
+ "gettsbeforeevent",
+ "gettsbetweenevents",
+ "getvaropts",
+ "ginput",
+ "gmres",
+ "gobjects",
+ "gplot",
+ "grabcode",
+ "gradient",
+ "graph",
+ "gray",
+ "grid",
+ "griddata",
+ "griddatan",
+ "griddedInterpolant",
+ "groot",
+ "groupcounts",
+ "groupfilter",
+ "groupsummary",
+ "grouptransform",
+ "gsvd",
+ "gtext",
+ "guidata",
+ "guide",
+ "guihandles",
+ "gunzip",
+ "gzip",
+ "h5create",
+ "h5disp",
+ "h5info",
+ "h5read",
+ "h5readatt",
+ "h5write",
+ "h5writeatt",
+ "hadamard",
+ "handle",
+ "hankel",
+ "hasFactoryValue",
+ "hasFrame",
+ "hasGroup",
+ "hasPersonalValue",
+ "hasSetting",
+ "hasTemporaryValue",
+ "hasdata",
+ "hasnext",
+ "hdfan",
+ "hdfdf24",
+ "hdfdfr8",
+ "hdfh",
+ "hdfhd",
+ "hdfhe",
+ "hdfhx",
+ "hdfinfo",
+ "hdfml",
+ "hdfpt",
+ "hdfread",
+ "hdfv",
+ "hdfvf",
+ "hdfvh",
+ "hdfvs",
+ "head",
+ "heatmap",
+ "height",
+ "help",
+ "helpdlg",
+ "hess",
+ "hex2dec",
+ "hex2num",
+ "hgexport",
+ "hggroup",
+ "hgtransform",
+ "hidden",
+ "highlight",
+ "hilb",
+ "histcounts",
+ "histcounts2",
+ "histogram",
+ "histogram2",
+ "hms",
+ "hold",
+ "holes",
+ "home",
+ "horzcat",
+ "hot",
+ "hour",
+ "hours",
+ "hover",
+ "hsv",
+ "hsv2rgb",
+ "hypot",
+ "i",
+ "ichol",
+ "idealfilter",
+ "idivide",
+ "ifft",
+ "ifft2",
+ "ifftn",
+ "ifftshift",
+ "ilu",
+ "im2double",
+ "im2frame",
+ "im2gray",
+ "im2java",
+ "imag",
+ "image",
+ "imageDatastore",
+ "imagesc",
+ "imapprox",
+ "imfinfo",
+ "imformats",
+ "imgCompress",
+ "import",
+ "importdata",
+ "imread",
+ "imresize",
+ "imshow",
+ "imtile",
+ "imwrite",
+ "inShape",
+ "incenter",
+ "incidence",
+ "ind2rgb",
+ "ind2sub",
+ "indegree",
+ "inedges",
+ "infoImpl",
+ "inmem",
+ "inner2outer",
+ "innerjoin",
+ "inpolygon",
+ "input",
+ "inputParser",
+ "inputdlg",
+ "inputname",
+ "insertATbl",
+ "insertAfter",
+ "insertBTbl",
+ "insertBefore",
+ "insertCol",
+ "insertImg",
+ "insertRows",
+ "int16",
+ "int2str",
+ "int32",
+ "int64",
+ "int8",
+ "integral",
+ "integral2",
+ "integral3",
+ "interp1",
+ "interp2",
+ "interp3",
+ "interpft",
+ "interpn",
+ "interpstreamspeed",
+ "intersect",
+ "intmax",
+ "intmin",
+ "inv",
+ "invhilb",
+ "ipermute",
+ "iqr",
+ "isCompressedImg",
+ "isConnected",
+ "isDiscreteStateSpecificationMutableImpl",
+ "isDone",
+ "isDoneImpl",
+ "isInactivePropertyImpl",
+ "isInputComplexityMutableImpl",
+ "isInputDataTypeMutableImpl",
+ "isInputSizeMutableImpl",
+ "isInterior",
+ "isKey",
+ "isLoaded",
+ "isLocked",
+ "isMATLABReleaseOlderThan",
+ "isPartitionable",
+ "isShuffleable",
+ "isStringScalar",
+ "isTunablePropertyDataTypeMutableImpl",
+ "isUnderlyingType",
+ "isa",
+ "isaUnderlying",
+ "isappdata",
+ "isbanded",
+ "isbetween",
+ "iscalendarduration",
+ "iscategorical",
+ "iscategory",
+ "iscell",
+ "iscellstr",
+ "ischange",
+ "ischar",
+ "iscolumn",
+ "iscom",
+ "isdag",
+ "isdatetime",
+ "isdiag",
+ "isdst",
+ "isduration",
+ "isempty",
+ "isenum",
+ "isequal",
+ "isequaln",
+ "isevent",
+ "isfield",
+ "isfile",
+ "isfinite",
+ "isfloat",
+ "isfolder",
+ "isgraphics",
+ "ishandle",
+ "ishermitian",
+ "ishold",
+ "ishole",
+ "isinf",
+ "isinteger",
+ "isinterface",
+ "isinterior",
+ "isisomorphic",
+ "isjava",
+ "iskeyword",
+ "isletter",
+ "islocalmax",
+ "islocalmin",
+ "islogical",
+ "ismac",
+ "ismatrix",
+ "ismember",
+ "ismembertol",
+ "ismethod",
+ "ismissing",
+ "ismultigraph",
+ "isnan",
+ "isnat",
+ "isnumeric",
+ "isobject",
+ "isocaps",
+ "isocolors",
+ "isomorphism",
+ "isonormals",
+ "isordinal",
+ "isosurface",
+ "isoutlier",
+ "ispc",
+ "isplaying",
+ "ispref",
+ "isprime",
+ "isprop",
+ "isprotected",
+ "isreal",
+ "isrecording",
+ "isregular",
+ "isrow",
+ "isscalar",
+ "issimplified",
+ "issorted",
+ "issortedrows",
+ "isspace",
+ "issparse",
+ "isstring",
+ "isstrprop",
+ "isstruct",
+ "isstudent",
+ "issymmetric",
+ "istable",
+ "istall",
+ "istimetable",
+ "istril",
+ "istriu",
+ "isundefined",
+ "isunix",
+ "isvalid",
+ "isvarname",
+ "isvector",
+ "isweekend",
+ "j",
+ "javaArray",
+ "javaMethod",
+ "javaMethodEDT",
+ "javaObject",
+ "javaObjectEDT",
+ "javaaddpath",
+ "javachk",
+ "javaclasspath",
+ "javarmpath",
+ "jet",
+ "join",
+ "jsondecode",
+ "jsonencode",
+ "juliandate",
+ "keyboard",
+ "keys",
+ "kron",
+ "labeledge",
+ "labelnode",
+ "lag",
+ "laplacian",
+ "lastwarn",
+ "layout",
+ "lcm",
+ "ldl",
+ "leapseconds",
+ "legend",
+ "legendre",
+ "length",
+ "letterBoundary",
+ "lettersPattern",
+ "lib.pointer",
+ "libfunctions",
+ "libfunctionsview",
+ "libisloaded",
+ "libpointer",
+ "libstruct",
+ "license",
+ "light",
+ "lightangle",
+ "lighting",
+ "lin2mu",
+ "line",
+ "lineBoundary",
+ "lines",
+ "linkaxes",
+ "linkdata",
+ "linkprop",
+ "linsolve",
+ "linspace",
+ "listModifiedFiles",
+ "listRequiredFiles",
+ "listdlg",
+ "listener",
+ "listfonts",
+ "load",
+ "loadObjectImpl",
+ "loadlibrary",
+ "loadobj",
+ "localfunctions",
+ "log",
+ "log10",
+ "log1p",
+ "log2",
+ "logical",
+ "loglog",
+ "logm",
+ "logspace",
+ "lookAheadBoundary",
+ "lookBehindBoundary",
+ "lookfor",
+ "lower",
+ "ls",
+ "lscov",
+ "lsqminnorm",
+ "lsqnonneg",
+ "lsqr",
+ "lu",
+ "magic",
+ "makehgtform",
+ "makima",
+ "mapreduce",
+ "mapreducer",
+ "maskedPattern",
+ "mat2cell",
+ "mat2str",
+ "matches",
+ "matchpairs",
+ "material",
+ "matfile",
+ "matlab.System",
+ "matlab.addons.disableAddon",
+ "matlab.addons.enableAddon",
+ "matlab.addons.install",
+ "matlab.addons.installedAddons",
+ "matlab.addons.isAddonEnabled",
+ "matlab.addons.toolbox.installToolbox",
+ "matlab.addons.toolbox.installedToolboxes",
+ "matlab.addons.toolbox.packageToolbox",
+ "matlab.addons.toolbox.toolboxVersion",
+ "matlab.addons.toolbox.uninstallToolbox",
+ "matlab.addons.uninstall",
+ "matlab.apputil.create",
+ "matlab.apputil.getInstalledAppInfo",
+ "matlab.apputil.install",
+ "matlab.apputil.package",
+ "matlab.apputil.run",
+ "matlab.apputil.uninstall",
+ "matlab.codetools.requiredFilesAndProducts",
+ "matlab.engine.FutureResult",
+ "matlab.engine.MatlabEngine",
+ "matlab.engine.connect_matlab",
+ "matlab.engine.engineName",
+ "matlab.engine.find_matlab",
+ "matlab.engine.isEngineShared",
+ "matlab.engine.shareEngine",
+ "matlab.engine.start_matlab",
+ "matlab.exception.JavaException",
+ "matlab.exception.PyException",
+ "matlab.graphics.chartcontainer.ChartContainer",
+ "matlab.graphics.chartcontainer.mixin.Colorbar",
+ "matlab.graphics.chartcontainer.mixin.Legend",
+ "matlab.io.Datastore",
+ "matlab.io.datastore.BlockedFileSet",
+ "matlab.io.datastore.DsFileReader",
+ "matlab.io.datastore.DsFileSet",
+ "matlab.io.datastore.FileSet",
+ "matlab.io.datastore.FileWritable",
+ "matlab.io.datastore.FoldersPropertyProvider",
+ "matlab.io.datastore.HadoopLocationBased",
+ "matlab.io.datastore.Partitionable",
+ "matlab.io.datastore.Shuffleable",
+ "matlab.io.hdf4.sd",
+ "matlab.io.hdfeos.gd",
+ "matlab.io.hdfeos.sw",
+ "matlab.io.saveVariablesToScript",
+ "matlab.lang.OnOffSwitchState",
+ "matlab.lang.correction.AppendArgumentsCorrection",
+ "matlab.lang.correction.ConvertToFunctionNotationCorrection",
+ "matlab.lang.correction.ReplaceIdentifierCorrection",
+ "matlab.lang.makeUniqueStrings",
+ "matlab.lang.makeValidName",
+ "matlab.mex.MexHost",
+ "matlab.mixin.Copyable",
+ "matlab.mixin.CustomDisplay",
+ "matlab.mixin.Heterogeneous",
+ "matlab.mixin.SetGet",
+ "matlab.mixin.SetGetExactNames",
+ "matlab.mixin.util.PropertyGroup",
+ "matlab.mock.AnyArguments",
+ "matlab.mock.InteractionHistory",
+ "matlab.mock.InteractionHistory.forMock",
+ "matlab.mock.MethodCallBehavior",
+ "matlab.mock.PropertyBehavior",
+ "matlab.mock.PropertyGetBehavior",
+ "matlab.mock.PropertySetBehavior",
+ "matlab.mock.TestCase",
+ "matlab.mock.actions.AssignOutputs",
+ "matlab.mock.actions.DoNothing",
+ "matlab.mock.actions.Invoke",
+ "matlab.mock.actions.ReturnStoredValue",
+ "matlab.mock.actions.StoreValue",
+ "matlab.mock.actions.ThrowException",
+ "matlab.mock.constraints.Occurred",
+ "matlab.mock.constraints.WasAccessed",
+ "matlab.mock.constraints.WasCalled",
+ "matlab.mock.constraints.WasSet",
+ "matlab.net.ArrayFormat",
+ "matlab.net.QueryParameter",
+ "matlab.net.URI",
+ "matlab.net.base64decode",
+ "matlab.net.base64encode",
+ "matlab.net.http.AuthInfo",
+ "matlab.net.http.AuthenticationScheme",
+ "matlab.net.http.Cookie",
+ "matlab.net.http.CookieInfo",
+ "matlab.net.http.Credentials",
+ "matlab.net.http.Disposition",
+ "matlab.net.http.HTTPException",
+ "matlab.net.http.HTTPOptions",
+ "matlab.net.http.HeaderField",
+ "matlab.net.http.LogRecord",
+ "matlab.net.http.MediaType",
+ "matlab.net.http.Message",
+ "matlab.net.http.MessageBody",
+ "matlab.net.http.MessageType",
+ "matlab.net.http.ProgressMonitor",
+ "matlab.net.http.ProtocolVersion",
+ "matlab.net.http.RequestLine",
+ "matlab.net.http.RequestMessage",
+ "matlab.net.http.RequestMethod",
+ "matlab.net.http.ResponseMessage",
+ "matlab.net.http.StartLine",
+ "matlab.net.http.StatusClass",
+ "matlab.net.http.StatusCode",
+ "matlab.net.http.StatusLine",
+ "matlab.net.http.field.AcceptField",
+ "matlab.net.http.field.AuthenticateField",
+ "matlab.net.http.field.AuthenticationInfoField",
+ "matlab.net.http.field.AuthorizationField",
+ "matlab.net.http.field.ContentDispositionField",
+ "matlab.net.http.field.ContentLengthField",
+ "matlab.net.http.field.ContentLocationField",
+ "matlab.net.http.field.ContentTypeField",
+ "matlab.net.http.field.CookieField",
+ "matlab.net.http.field.DateField",
+ "matlab.net.http.field.GenericField",
+ "matlab.net.http.field.GenericParameterizedField",
+ "matlab.net.http.field.HTTPDateField",
+ "matlab.net.http.field.IntegerField",
+ "matlab.net.http.field.LocationField",
+ "matlab.net.http.field.MediaRangeField",
+ "matlab.net.http.field.SetCookieField",
+ "matlab.net.http.field.URIReferenceField",
+ "matlab.net.http.io.BinaryConsumer",
+ "matlab.net.http.io.ContentConsumer",
+ "matlab.net.http.io.ContentProvider",
+ "matlab.net.http.io.FileConsumer",
+ "matlab.net.http.io.FileProvider",
+ "matlab.net.http.io.FormProvider",
+ "matlab.net.http.io.GenericConsumer",
+ "matlab.net.http.io.GenericProvider",
+ "matlab.net.http.io.ImageConsumer",
+ "matlab.net.http.io.ImageProvider",
+ "matlab.net.http.io.JSONConsumer",
+ "matlab.net.http.io.JSONProvider",
+ "matlab.net.http.io.MultipartConsumer",
+ "matlab.net.http.io.MultipartFormProvider",
+ "matlab.net.http.io.MultipartProvider",
+ "matlab.net.http.io.StringConsumer",
+ "matlab.net.http.io.StringProvider",
+ "matlab.perftest.FixedTimeExperiment",
+ "matlab.perftest.FrequentistTimeExperiment",
+ "matlab.perftest.TestCase",
+ "matlab.perftest.TimeExperiment",
+ "matlab.perftest.TimeResult",
+ "matlab.project.Project",
+ "matlab.project.convertDefinitionFiles",
+ "matlab.project.createProject",
+ "matlab.project.deleteProject",
+ "matlab.project.loadProject",
+ "matlab.project.rootProject",
+ "matlab.settings.FactoryGroup.createToolboxGroup",
+ "matlab.settings.SettingsFileUpgrader",
+ "matlab.settings.loadSettingsCompatibilityResults",
+ "matlab.settings.mustBeIntegerScalar",
+ "matlab.settings.mustBeLogicalScalar",
+ "matlab.settings.mustBeNumericScalar",
+ "matlab.settings.mustBeStringScalar",
+ "matlab.settings.reloadFactoryFile",
+ "matlab.system.mixin.FiniteSource",
+ "matlab.tall.blockMovingWindow",
+ "matlab.tall.movingWindow",
+ "matlab.tall.reduce",
+ "matlab.tall.transform",
+ "matlab.test.behavior.Missing",
+ "matlab.ui.componentcontainer.ComponentContainer",
+ "matlab.uitest.TestCase",
+ "matlab.uitest.TestCase.forInteractiveUse",
+ "matlab.uitest.unlock",
+ "matlab.unittest.Test",
+ "matlab.unittest.TestCase",
+ "matlab.unittest.TestResult",
+ "matlab.unittest.TestRunner",
+ "matlab.unittest.TestSuite",
+ "matlab.unittest.constraints.BooleanConstraint",
+ "matlab.unittest.constraints.Constraint",
+ "matlab.unittest.constraints.Tolerance",
+ "matlab.unittest.diagnostics.ConstraintDiagnostic",
+ "matlab.unittest.diagnostics.Diagnostic",
+ "matlab.unittest.fixtures.Fixture",
+ "matlab.unittest.measurement.DefaultMeasurementResult",
+ "matlab.unittest.measurement.MeasurementResult",
+ "matlab.unittest.measurement.chart.ComparisonPlot",
+ "matlab.unittest.plugins.OutputStream",
+ "matlab.unittest.plugins.Parallelizable",
+ "matlab.unittest.plugins.QualifyingPlugin",
+ "matlab.unittest.plugins.TestRunnerPlugin",
+ "matlab.wsdl.createWSDLClient",
+ "matlab.wsdl.setWSDLToolPath",
+ "matlabRelease",
+ "matlabrc",
+ "matlabroot",
+ "max",
+ "maxflow",
+ "maxk",
+ "mean",
+ "median",
+ "memmapfile",
+ "memoize",
+ "memory",
+ "mergecats",
+ "mergevars",
+ "mesh",
+ "meshc",
+ "meshgrid",
+ "meshz",
+ "meta.ArrayDimension",
+ "meta.DynamicProperty",
+ "meta.EnumeratedValue",
+ "meta.FixedDimension",
+ "meta.MetaData",
+ "meta.UnrestrictedDimension",
+ "meta.Validation",
+ "meta.abstractDetails",
+ "meta.class",
+ "meta.class.fromName",
+ "meta.event",
+ "meta.method",
+ "meta.package",
+ "meta.package.fromName",
+ "meta.package.getAllPackages",
+ "meta.property",
+ "metaclass",
+ "methods",
+ "methodsview",
+ "mex",
+ "mexext",
+ "mexhost",
+ "mfilename",
+ "mget",
+ "milliseconds",
+ "min",
+ "mink",
+ "minres",
+ "minspantree",
+ "minute",
+ "minutes",
+ "mislocked",
+ "missing",
+ "mkdir",
+ "mkpp",
+ "mldivide",
+ "mlintrpt",
+ "mlock",
+ "mmfileinfo",
+ "mod",
+ "mode",
+ "month",
+ "more",
+ "morebins",
+ "movAbsHDU",
+ "movNamHDU",
+ "movRelHDU",
+ "move",
+ "movefile",
+ "movegui",
+ "movevars",
+ "movie",
+ "movmad",
+ "movmax",
+ "movmean",
+ "movmedian",
+ "movmin",
+ "movprod",
+ "movstd",
+ "movsum",
+ "movvar",
+ "mpower",
+ "mput",
+ "mrdivide",
+ "msgbox",
+ "mtimes",
+ "mu2lin",
+ "multibandread",
+ "multibandwrite",
+ "munlock",
+ "mustBeA",
+ "mustBeFile",
+ "mustBeFinite",
+ "mustBeFloat",
+ "mustBeFolder",
+ "mustBeGreaterThan",
+ "mustBeGreaterThanOrEqual",
+ "mustBeInRange",
+ "mustBeInteger",
+ "mustBeLessThan",
+ "mustBeLessThanOrEqual",
+ "mustBeMember",
+ "mustBeNegative",
+ "mustBeNonNan",
+ "mustBeNonempty",
+ "mustBeNonmissing",
+ "mustBeNonnegative",
+ "mustBeNonpositive",
+ "mustBeNonsparse",
+ "mustBeNonzero",
+ "mustBeNonzeroLengthText",
+ "mustBeNumeric",
+ "mustBeNumericOrLogical",
+ "mustBePositive",
+ "mustBeReal",
+ "mustBeScalarOrEmpty",
+ "mustBeText",
+ "mustBeTextScalar",
+ "mustBeUnderlyingType",
+ "mustBeValidVariableName",
+ "mustBeVector",
+ "namedPattern",
+ "namedargs2cell",
+ "namelengthmax",
+ "nargin",
+ "narginchk",
+ "nargout",
+ "nargoutchk",
+ "native2unicode",
+ "nccreate",
+ "ncdisp",
+ "nchoosek",
+ "ncinfo",
+ "ncread",
+ "ncreadatt",
+ "ncwrite",
+ "ncwriteatt",
+ "ncwriteschema",
+ "ndgrid",
+ "ndims",
+ "nearest",
+ "nearestNeighbor",
+ "nearestvertex",
+ "neighbors",
+ "netcdf.abort",
+ "netcdf.close",
+ "netcdf.copyAtt",
+ "netcdf.create",
+ "netcdf.defDim",
+ "netcdf.defGrp",
+ "netcdf.defVar",
+ "netcdf.defVarChunking",
+ "netcdf.defVarDeflate",
+ "netcdf.defVarFill",
+ "netcdf.defVarFletcher32",
+ "netcdf.delAtt",
+ "netcdf.endDef",
+ "netcdf.getAtt",
+ "netcdf.getChunkCache",
+ "netcdf.getConstant",
+ "netcdf.getConstantNames",
+ "netcdf.getVar",
+ "netcdf.inq",
+ "netcdf.inqAtt",
+ "netcdf.inqAttID",
+ "netcdf.inqAttName",
+ "netcdf.inqDim",
+ "netcdf.inqDimID",
+ "netcdf.inqDimIDs",
+ "netcdf.inqFormat",
+ "netcdf.inqGrpName",
+ "netcdf.inqGrpNameFull",
+ "netcdf.inqGrpParent",
+ "netcdf.inqGrps",
+ "netcdf.inqLibVers",
+ "netcdf.inqNcid",
+ "netcdf.inqUnlimDims",
+ "netcdf.inqVar",
+ "netcdf.inqVarChunking",
+ "netcdf.inqVarDeflate",
+ "netcdf.inqVarFill",
+ "netcdf.inqVarFletcher32",
+ "netcdf.inqVarID",
+ "netcdf.inqVarIDs",
+ "netcdf.open",
+ "netcdf.putAtt",
+ "netcdf.putVar",
+ "netcdf.reDef",
+ "netcdf.renameAtt",
+ "netcdf.renameDim",
+ "netcdf.renameVar",
+ "netcdf.setChunkCache",
+ "netcdf.setDefaultFormat",
+ "netcdf.setFill",
+ "netcdf.sync",
+ "newline",
+ "newplot",
+ "nextpow2",
+ "nexttile",
+ "nnz",
+ "nonzeros",
+ "norm",
+ "normalize",
+ "normest",
+ "notify",
+ "now",
+ "nsidedpoly",
+ "nthroot",
+ "nufft",
+ "nufftn",
+ "null",
+ "num2cell",
+ "num2hex",
+ "num2ruler",
+ "num2str",
+ "numArgumentsFromSubscript",
+ "numRegions",
+ "numboundaries",
+ "numedges",
+ "numel",
+ "numnodes",
+ "numpartitions",
+ "numsides",
+ "nzmax",
+ "ode113",
+ "ode15i",
+ "ode15s",
+ "ode23",
+ "ode23s",
+ "ode23t",
+ "ode23tb",
+ "ode45",
+ "odeget",
+ "odeset",
+ "odextend",
+ "onCleanup",
+ "ones",
+ "open",
+ "openDiskFile",
+ "openFile",
+ "openProject",
+ "openfig",
+ "opengl",
+ "openvar",
+ "optimget",
+ "optimset",
+ "optionalPattern",
+ "ordeig",
+ "orderfields",
+ "ordqz",
+ "ordschur",
+ "orient",
+ "orth",
+ "outdegree",
+ "outedges",
+ "outerjoin",
+ "overlaps",
+ "overlapsrange",
+ "pack",
+ "pad",
+ "padecoef",
+ "pagectranspose",
+ "pagemtimes",
+ "pagetranspose",
+ "pan",
+ "panInteraction",
+ "parallelplot",
+ "pareto",
+ "parquetDatastore",
+ "parquetinfo",
+ "parquetread",
+ "parquetwrite",
+ "partition",
+ "parula",
+ "pascal",
+ "patch",
+ "path",
+ "pathsep",
+ "pathtool",
+ "pattern",
+ "pause",
+ "pbaspect",
+ "pcg",
+ "pchip",
+ "pcode",
+ "pcolor",
+ "pdepe",
+ "pdeval",
+ "peaks",
+ "perimeter",
+ "perl",
+ "perms",
+ "permute",
+ "pi",
+ "pie",
+ "pie3",
+ "pink",
+ "pinv",
+ "planerot",
+ "play",
+ "playblocking",
+ "plot",
+ "plot3",
+ "plotbrowser",
+ "plotedit",
+ "plotmatrix",
+ "plottools",
+ "plus",
+ "pointLocation",
+ "pol2cart",
+ "polaraxes",
+ "polarbubblechart",
+ "polarhistogram",
+ "polarplot",
+ "polarscatter",
+ "poly",
+ "polyarea",
+ "polybuffer",
+ "polyder",
+ "polyeig",
+ "polyfit",
+ "polyint",
+ "polyshape",
+ "polyval",
+ "polyvalm",
+ "posixtime",
+ "possessivePattern",
+ "pow2",
+ "ppval",
+ "predecessors",
+ "prefdir",
+ "preferences",
+ "press",
+ "preview",
+ "primes",
+ "print",
+ "printdlg",
+ "printopt",
+ "printpreview",
+ "prism",
+ "processInputSpecificationChangeImpl",
+ "processTunedPropertiesImpl",
+ "prod",
+ "profile",
+ "propedit",
+ "properties",
+ "propertyeditor",
+ "psi",
+ "publish",
+ "pwd",
+ "pyargs",
+ "pyenv",
+ "qmr",
+ "qr",
+ "qrdelete",
+ "qrinsert",
+ "qrupdate",
+ "quad2d",
+ "quadgk",
+ "quarter",
+ "questdlg",
+ "quit",
+ "quiver",
+ "quiver3",
+ "qz",
+ "rad2deg",
+ "rand",
+ "randi",
+ "randn",
+ "randperm",
+ "rank",
+ "rat",
+ "rats",
+ "rbbox",
+ "rcond",
+ "read",
+ "readATblHdr",
+ "readBTblHdr",
+ "readCard",
+ "readCol",
+ "readFrame",
+ "readImg",
+ "readKey",
+ "readKeyCmplx",
+ "readKeyDbl",
+ "readKeyLongLong",
+ "readKeyLongStr",
+ "readKeyUnit",
+ "readRecord",
+ "readall",
+ "readcell",
+ "readline",
+ "readlines",
+ "readmatrix",
+ "readstruct",
+ "readtable",
+ "readtimetable",
+ "readvars",
+ "real",
+ "reallog",
+ "realmax",
+ "realmin",
+ "realpow",
+ "realsqrt",
+ "record",
+ "recordblocking",
+ "rectangle",
+ "rectint",
+ "recycle",
+ "reducepatch",
+ "reducevolume",
+ "refresh",
+ "refreshSourceControl",
+ "refreshdata",
+ "regexp",
+ "regexpPattern",
+ "regexpi",
+ "regexprep",
+ "regexptranslate",
+ "regionZoomInteraction",
+ "regions",
+ "registerevent",
+ "regmatlabserver",
+ "rehash",
+ "relationaloperators",
+ "release",
+ "releaseImpl",
+ "reload",
+ "rem",
+ "remove",
+ "removeCategory",
+ "removeFile",
+ "removeGroup",
+ "removeLabel",
+ "removePath",
+ "removeReference",
+ "removeSetting",
+ "removeShortcut",
+ "removeShutdownFile",
+ "removeStartupFile",
+ "removeStyle",
+ "removeToolbarExplorationButtons",
+ "removecats",
+ "removets",
+ "removevars",
+ "rename",
+ "renamecats",
+ "renamevars",
+ "rendererinfo",
+ "reordercats",
+ "reordernodes",
+ "repelem",
+ "replace",
+ "replaceBetween",
+ "repmat",
+ "resample",
+ "rescale",
+ "reset",
+ "resetImpl",
+ "reshape",
+ "residue",
+ "restoredefaultpath",
+ "resume",
+ "rethrow",
+ "retime",
+ "reverse",
+ "rgb2gray",
+ "rgb2hsv",
+ "rgb2ind",
+ "rgbplot",
+ "ribbon",
+ "rlim",
+ "rmappdata",
+ "rmboundary",
+ "rmdir",
+ "rmedge",
+ "rmfield",
+ "rmholes",
+ "rmmissing",
+ "rmnode",
+ "rmoutliers",
+ "rmpath",
+ "rmpref",
+ "rmprop",
+ "rmslivers",
+ "rng",
+ "roots",
+ "rosser",
+ "rot90",
+ "rotate",
+ "rotate3d",
+ "rotateInteraction",
+ "round",
+ "rowfun",
+ "rows2vars",
+ "rref",
+ "rsf2csf",
+ "rtickangle",
+ "rtickformat",
+ "rticklabels",
+ "rticks",
+ "ruler2num",
+ "rulerPanInteraction",
+ "run",
+ "runChecks",
+ "runperf",
+ "runtests",
+ "save",
+ "saveObjectImpl",
+ "saveas",
+ "savefig",
+ "saveobj",
+ "savepath",
+ "scale",
+ "scatter",
+ "scatter3",
+ "scatteredInterpolant",
+ "scatterhistogram",
+ "schur",
+ "scroll",
+ "sec",
+ "secd",
+ "sech",
+ "second",
+ "seconds",
+ "semilogx",
+ "semilogy",
+ "sendmail",
+ "serialport",
+ "serialportlist",
+ "set",
+ "setBscale",
+ "setCompressionType",
+ "setDTR",
+ "setHCompScale",
+ "setHCompSmooth",
+ "setProperties",
+ "setRTS",
+ "setTileDim",
+ "setTscale",
+ "setabstime",
+ "setappdata",
+ "setcats",
+ "setdiff",
+ "setenv",
+ "setfield",
+ "setinterpmethod",
+ "setpixelposition",
+ "setpref",
+ "settimeseriesnames",
+ "settings",
+ "setuniformtime",
+ "setup",
+ "setupImpl",
+ "setvaropts",
+ "setvartype",
+ "setxor",
+ "sgtitle",
+ "shading",
+ "sheetnames",
+ "shg",
+ "shiftdim",
+ "shortestpath",
+ "shortestpathtree",
+ "showplottool",
+ "shrinkfaces",
+ "shuffle",
+ "sign",
+ "simplify",
+ "sin",
+ "sind",
+ "single",
+ "sinh",
+ "sinpi",
+ "size",
+ "slice",
+ "smooth3",
+ "smoothdata",
+ "snapnow",
+ "sort",
+ "sortboundaries",
+ "sortregions",
+ "sortrows",
+ "sortx",
+ "sorty",
+ "sound",
+ "soundsc",
+ "spalloc",
+ "sparse",
+ "spaugment",
+ "spconvert",
+ "spdiags",
+ "specular",
+ "speye",
+ "spfun",
+ "sph2cart",
+ "sphere",
+ "spinmap",
+ "spline",
+ "split",
+ "splitapply",
+ "splitlines",
+ "splitvars",
+ "spones",
+ "spparms",
+ "sprand",
+ "sprandn",
+ "sprandsym",
+ "sprank",
+ "spreadsheetDatastore",
+ "spreadsheetImportOptions",
+ "spring",
+ "sprintf",
+ "spy",
+ "sqrt",
+ "sqrtm",
+ "squeeze",
+ "ss2tf",
+ "sscanf",
+ "stack",
+ "stackedplot",
+ "stairs",
+ "standardizeMissing",
+ "start",
+ "startat",
+ "startsWith",
+ "startup",
+ "std",
+ "stem",
+ "stem3",
+ "step",
+ "stepImpl",
+ "stlread",
+ "stlwrite",
+ "stop",
+ "str2double",
+ "str2func",
+ "str2num",
+ "strcat",
+ "strcmp",
+ "strcmpi",
+ "stream2",
+ "stream3",
+ "streamline",
+ "streamparticles",
+ "streamribbon",
+ "streamslice",
+ "streamtube",
+ "strfind",
+ "string",
+ "strings",
+ "strip",
+ "strjoin",
+ "strjust",
+ "strlength",
+ "strncmp",
+ "strncmpi",
+ "strrep",
+ "strsplit",
+ "strtok",
+ "strtrim",
+ "struct",
+ "struct2cell",
+ "struct2table",
+ "structfun",
+ "sub2ind",
+ "subgraph",
+ "subplot",
+ "subsasgn",
+ "subscribe",
+ "subsindex",
+ "subspace",
+ "subsref",
+ "substruct",
+ "subtitle",
+ "subtract",
+ "subvolume",
+ "successors",
+ "sum",
+ "summary",
+ "summer",
+ "superclasses",
+ "surf",
+ "surf2patch",
+ "surface",
+ "surfaceArea",
+ "surfc",
+ "surfl",
+ "surfnorm",
+ "svd",
+ "svds",
+ "svdsketch",
+ "swapbytes",
+ "swarmchart",
+ "swarmchart3",
+ "sylvester",
+ "symamd",
+ "symbfact",
+ "symmlq",
+ "symrcm",
+ "synchronize",
+ "sysobjupdate",
+ "system",
+ "table",
+ "table2array",
+ "table2cell",
+ "table2struct",
+ "table2timetable",
+ "tabularTextDatastore",
+ "tail",
+ "tall",
+ "tallrng",
+ "tan",
+ "tand",
+ "tanh",
+ "tar",
+ "tcpclient",
+ "tempdir",
+ "tempname",
+ "testsuite",
+ "tetramesh",
+ "texlabel",
+ "text",
+ "textBoundary",
+ "textscan",
+ "textwrap",
+ "tfqmr",
+ "thetalim",
+ "thetatickformat",
+ "thetaticklabels",
+ "thetaticks",
+ "thingSpeakRead",
+ "thingSpeakWrite",
+ "throw",
+ "throwAsCaller",
+ "tic",
+ "tiledlayout",
+ "time",
+ "timeit",
+ "timeofday",
+ "timer",
+ "timerange",
+ "timerfind",
+ "timerfindall",
+ "timeseries",
+ "timetable",
+ "timetable2table",
+ "timezones",
+ "title",
+ "toc",
+ "todatenum",
+ "toeplitz",
+ "toolboxdir",
+ "topkrows",
+ "toposort",
+ "trace",
+ "transclosure",
+ "transform",
+ "translate",
+ "transpose",
+ "transreduction",
+ "trapz",
+ "treelayout",
+ "treeplot",
+ "triangulation",
+ "tril",
+ "trimesh",
+ "triplot",
+ "trisurf",
+ "triu",
+ "true",
+ "tscollection",
+ "tsdata.event",
+ "tsearchn",
+ "turbo",
+ "turningdist",
+ "type",
+ "typecast",
+ "tzoffset",
+ "uialert",
+ "uiaxes",
+ "uibutton",
+ "uibuttongroup",
+ "uicheckbox",
+ "uiconfirm",
+ "uicontextmenu",
+ "uicontrol",
+ "uidatepicker",
+ "uidropdown",
+ "uieditfield",
+ "uifigure",
+ "uigauge",
+ "uigetdir",
+ "uigetfile",
+ "uigetpref",
+ "uigridlayout",
+ "uihtml",
+ "uiimage",
+ "uiknob",
+ "uilabel",
+ "uilamp",
+ "uilistbox",
+ "uimenu",
+ "uint16",
+ "uint32",
+ "uint64",
+ "uint8",
+ "uiopen",
+ "uipanel",
+ "uiprogressdlg",
+ "uipushtool",
+ "uiputfile",
+ "uiradiobutton",
+ "uiresume",
+ "uisave",
+ "uisetcolor",
+ "uisetfont",
+ "uisetpref",
+ "uislider",
+ "uispinner",
+ "uistack",
+ "uistyle",
+ "uiswitch",
+ "uitab",
+ "uitabgroup",
+ "uitable",
+ "uitextarea",
+ "uitogglebutton",
+ "uitoggletool",
+ "uitoolbar",
+ "uitree",
+ "uitreenode",
+ "uiwait",
+ "uminus",
+ "underlyingType",
+ "underlyingValue",
+ "unicode2native",
+ "union",
+ "unique",
+ "uniquetol",
+ "unix",
+ "unloadlibrary",
+ "unmesh",
+ "unmkpp",
+ "unregisterallevents",
+ "unregisterevent",
+ "unstack",
+ "unsubscribe",
+ "untar",
+ "unwrap",
+ "unzip",
+ "update",
+ "updateDependencies",
+ "uplus",
+ "upper",
+ "usejava",
+ "userpath",
+ "validateFunctionSignaturesJSON",
+ "validateInputsImpl",
+ "validatePropertiesImpl",
+ "validateattributes",
+ "validatecolor",
+ "validatestring",
+ "values",
+ "vander",
+ "var",
+ "varargin",
+ "varargout",
+ "varfun",
+ "vartype",
+ "vecnorm",
+ "ver",
+ "verLessThan",
+ "version",
+ "vertcat",
+ "vertexAttachments",
+ "vertexNormal",
+ "view",
+ "viewmtx",
+ "visdiff",
+ "volume",
+ "volumebounds",
+ "voronoi",
+ "voronoiDiagram",
+ "voronoin",
+ "wait",
+ "waitbar",
+ "waitfor",
+ "waitforbuttonpress",
+ "warndlg",
+ "warning",
+ "waterfall",
+ "web",
+ "weboptions",
+ "webread",
+ "websave",
+ "webwrite",
+ "week",
+ "weekday",
+ "what",
+ "which",
+ "whitespaceBoundary",
+ "whitespacePattern",
+ "who",
+ "whos",
+ "width",
+ "wildcardPattern",
+ "wilkinson",
+ "winopen",
+ "winqueryreg",
+ "winter",
+ "withinrange",
+ "withtol",
+ "wordcloud",
+ "write",
+ "writeChecksum",
+ "writeCol",
+ "writeComment",
+ "writeDate",
+ "writeHistory",
+ "writeImg",
+ "writeKey",
+ "writeKeyUnit",
+ "writeVideo",
+ "writeall",
+ "writecell",
+ "writeline",
+ "writematrix",
+ "writestruct",
+ "writetable",
+ "writetimetable",
+ "xcorr",
+ "xcov",
+ "xlabel",
+ "xlim",
+ "xline",
+ "xmlread",
+ "xmlwrite",
+ "xor",
+ "xslt",
+ "xtickangle",
+ "xtickformat",
+ "xticklabels",
+ "xticks",
+ "year",
+ "years",
+ "ylabel",
+ "ylim",
+ "yline",
+ "ymd",
+ "ytickangle",
+ "ytickformat",
+ "yticklabels",
+ "yticks",
+ "yyaxis",
+ "yyyymmdd",
+ "zeros",
+ "zip",
+ "zlabel",
+ "zlim",
+ "zoom",
+ "zoomInteraction",
+ "ztickangle",
+ "ztickformat",
+ "zticklabels",
+ "zticks",
+ ],
+ prefix=r"(?<!\.)(", # Exclude field names
+ suffix=r")\b"
+ ),
+ Name.Builtin
+ ),
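+            # e.g. a bare "plot(x)" highlights "plot" as a builtin, while the
+            # field access "obj.plot" leaves it as a plain name because of the
+            # (?<!\.) lookbehind in the prefix above.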
# line continuation with following comment:
- (r'(\.\.\.)(.*)$', bygroups(Keyword, Comment)),
-
- # command form:
- # "How MATLAB Recognizes Command Syntax" specifies that an operator
- # is recognized if it is either surrounded by spaces or by no
- # spaces on both sides (this allows distinguishing `cd ./foo` from
- # `cd ./ foo`.). Here, the regex checks that the first word in the
- # line is not followed by <spaces> and then
- # (equal | open-parenthesis | <operator><space> | <space>).
- (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|%s\s|\s)' % _operators,
- bygroups(Whitespace, Name, Whitespace), 'commandargs'),
-
- include('expressions')
+ (r'(\.\.\.)(.*)$', bygroups(Keyword, Comment)),
+
+ # command form:
+ # "How MATLAB Recognizes Command Syntax" specifies that an operator
+ # is recognized if it is either surrounded by spaces or by no
+ # spaces on both sides (this allows distinguishing `cd ./foo` from
+            # `cd ./ foo`). Here, the regex checks that the first word in the
+ # line is not followed by <spaces> and then
+ # (equal | open-parenthesis | <operator><space> | <space>).
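+            # For example, "hold on" is lexed as a command, while "x  = 1"
+            # (extra spaces before the "=") is not, because the lookahead
+            # rejects a following equals sign.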
+ (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|%s\s|\s)' % _operators,
+ bygroups(Whitespace, Name, Whitespace), 'commandargs'),
+
+ include('expressions')
],
'blockcomment': [
(r'^\s*%\}', Comment.Multiline, '#pop'),
@@ -2676,62 +2676,62 @@ class MatlabLexer(RegexLexer):
(r'.', Comment.Multiline),
],
'deffunc': [
- (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
bygroups(Whitespace, Text, Whitespace, Punctuation,
Whitespace, Name.Function, Punctuation, Text,
Punctuation, Whitespace), '#pop'),
# function with no args
- (r'(\s*)([a-zA-Z_]\w*)',
- bygroups(Whitespace, Name.Function), '#pop'),
+ (r'(\s*)([a-zA-Z_]\w*)',
+ bygroups(Whitespace, Name.Function), '#pop'),
],
- 'propattrs': [
- (r'(\w+)(\s*)(=)(\s*)(\d+)',
- bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
- Number)),
- (r'(\w+)(\s*)(=)(\s*)([a-zA-Z]\w*)',
- bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
- Keyword)),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'defprops': [
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
- (r'(?<!\.)end\b', Keyword, '#pop'),
- include('expressions'),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'commandargs': [
- # If an equal sign or other operator is encountered, this
- # isn't a command. It might be a variable assignment or
- # comparison operation with multiple spaces before the
- # equal sign or operator
- (r"=", Punctuation, '#pop'),
- (_operators, Operator, '#pop'),
- (r"[ \t]+", Whitespace),
- ("'[^']*'", String),
- (r"[^';\s]+", String),
- (";", Punctuation, '#pop'),
- default('#pop'),
- ]
+ 'propattrs': [
+ (r'(\w+)(\s*)(=)(\s*)(\d+)',
+ bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
+ Number)),
+ (r'(\w+)(\s*)(=)(\s*)([a-zA-Z]\w*)',
+ bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
+ Keyword)),
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ (r'\s+', Whitespace),
+ (r'.', Text),
+ ],
+ 'defprops': [
+ (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
+ (r'%.*$', Comment),
+ (r'(?<!\.)end\b', Keyword, '#pop'),
+ include('expressions'),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ ],
+ 'commandargs': [
+ # If an equal sign or other operator is encountered, this
+ # isn't a command. It might be a variable assignment or
+ # comparison operation with multiple spaces before the
+ # equal sign or operator
+ (r"=", Punctuation, '#pop'),
+ (_operators, Operator, '#pop'),
+ (r"[ \t]+", Whitespace),
+ ("'[^']*'", String),
+ (r"[^';\s]+", String),
+ (";", Punctuation, '#pop'),
+ default('#pop'),
+ ]
}
def analyse_text(text):
- # function declaration.
- first_non_comment = next((line for line in text.splitlines()
- if not re.match(r'^\s*%', text)), '').strip()
- if (first_non_comment.startswith('function')
- and '{' not in first_non_comment):
- return 1.
- # comment
- elif re.search(r'^\s*%', text, re.M):
+ # function declaration.
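+        # look for a leading "function" keyword on the first non-comment line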
+ first_non_comment = next((line for line in text.splitlines()
+                                  if not re.match(r'^\s*%', line)), '').strip()
+ if (first_non_comment.startswith('function')
+ and '{' not in first_non_comment):
+ return 1.
+ # comment
+ elif re.search(r'^\s*%', text, re.M):
return 0.2
- # system cmd
- elif re.search(r'^!\w+', text, re.M):
+ # system cmd
+ elif re.search(r'^!\w+', text, re.M):
return 0.2
@@ -2753,7 +2753,7 @@ class MatlabSessionLexer(Lexer):
curcode = ''
insertions = []
- continuation = False
+ continuation = False
for match in line_re.finditer(text):
line = match.group()
@@ -2776,36 +2776,36 @@ class MatlabSessionLexer(Lexer):
# line = "\n" + line
token = (0, Generic.Traceback, line)
insertions.append((idx, [token]))
- elif continuation:
- # line_start is the length of the most recent prompt symbol
- line_start = len(insertions[-1][-1][-1])
- # Set leading spaces with the length of the prompt to be a generic prompt
- # This keeps code aligned when prompts are removed, say with some Javascript
- if line.startswith(' '*line_start):
- insertions.append(
- (len(curcode), [(0, Generic.Prompt, line[:line_start])]))
- curcode += line[line_start:]
- else:
- curcode += line
+ elif continuation:
+ # line_start is the length of the most recent prompt symbol
+ line_start = len(insertions[-1][-1][-1])
+                # If the line starts with as many spaces as the prompt is wide,
+                # treat that prefix as a generic prompt so the code stays aligned
+                # when prompts are stripped, e.g. by some JavaScript on a web page.
+ if line.startswith(' '*line_start):
+ insertions.append(
+ (len(curcode), [(0, Generic.Prompt, line[:line_start])]))
+ curcode += line[line_start:]
+ else:
+ curcode += line
else:
if curcode:
- yield from do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
- # Does not allow continuation if a comment is included after the ellipses.
- # Continues any line that ends with ..., even comments (lines that start with %)
- if line.strip().endswith('...'):
- continuation = True
- else:
- continuation = False
-
+            # Continuation is not recognized if a comment follows the ellipsis.
+            # Any line that ends with "..." continues, even comment lines (starting with %).
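+            # e.g. "x = [1, 2, ...\n" marks the next session line as a
+            # continuation of the current code block.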
+ if line.strip().endswith('...'):
+ continuation = True
+ else:
+ continuation = False
+
if curcode: # or item:
- yield from do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode))
class OctaveLexer(RegexLexer):
@@ -3145,21 +3145,21 @@ class OctaveLexer(RegexLexer):
tokens = {
'root': [
- (r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
- (r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
+ (r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
+ (r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
(r'[%#].*$', Comment),
- (r'^\s*function\b', Keyword, 'deffunc'),
+ (r'^\s*function\b', Keyword, 'deffunc'),
# from 'iskeyword' on hg changeset 8cc154f45e37
(words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
- 'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
- 'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
- 'endfunction', 'endif', 'endmethods', 'endproperties', 'endswitch',
- 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if',
- 'methods', 'otherwise', 'persistent', 'properties', 'return',
- 'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
- 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
+ 'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
+ 'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
+ 'endfunction', 'endif', 'endmethods', 'endproperties', 'endswitch',
+ 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if',
+ 'methods', 'otherwise', 'persistent', 'properties', 'return',
+ 'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
+ 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
Keyword),
(words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
@@ -3193,38 +3193,38 @@ class OctaveLexer(RegexLexer):
(r'(?<![\w)\].])\'', String, 'string'),
(r'[a-zA-Z_]\w*', Name),
- (r'\s+', Text),
+ (r'\s+', Text),
(r'.', Text),
],
- 'percentblockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'hashblockcomment': [
- (r'^\s*#\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
+ 'percentblockcomment': [
+ (r'^\s*%\}', Comment.Multiline, '#pop'),
+ (r'^.*\n', Comment.Multiline),
+ (r'.', Comment.Multiline),
+ ],
+ 'hashblockcomment': [
+ (r'^\s*#\}', Comment.Multiline, '#pop'),
+ (r'^.*\n', Comment.Multiline),
+ (r'.', Comment.Multiline),
+ ],
'string': [
(r"[^']*'", String, '#pop'),
],
'deffunc': [
- (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
bygroups(Whitespace, Text, Whitespace, Punctuation,
Whitespace, Name.Function, Punctuation, Text,
Punctuation, Whitespace), '#pop'),
# function with no args
- (r'(\s*)([a-zA-Z_]\w*)',
- bygroups(Whitespace, Name.Function), '#pop'),
+ (r'(\s*)([a-zA-Z_]\w*)',
+ bygroups(Whitespace, Name.Function), '#pop'),
],
}
- def analyse_text(text):
- """Octave is quite hard to spot, and it looks like Matlab as well."""
- return 0
-
+ def analyse_text(text):
+ """Octave is quite hard to spot, and it looks like Matlab as well."""
+ return 0
+
class ScilabLexer(RegexLexer):
"""
For Scilab source code.
@@ -3239,7 +3239,7 @@ class ScilabLexer(RegexLexer):
tokens = {
'root': [
(r'//.*?$', Comment.Single),
- (r'^\s*function\b', Keyword, 'deffunc'),
+ (r'^\s*function\b', Keyword, 'deffunc'),
(words((
'__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
@@ -3284,7 +3284,7 @@ class ScilabLexer(RegexLexer):
(r'.', String, '#pop'),
],
'deffunc': [
- (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
bygroups(Whitespace, Text, Whitespace, Punctuation,
Whitespace, Name.Function, Punctuation, Text,
Punctuation, Whitespace), '#pop'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/meson.py b/contrib/python/Pygments/py3/pygments/lexers/meson.py
index 47db014187..990aa5ee2f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/meson.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/meson.py
@@ -1,155 +1,155 @@
-"""
- pygments.lexers.meson
- ~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexer for the Meson build system
-
+"""
+ pygments.lexers.meson
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexer for the Meson build system
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import (
- RegexLexer,
- words,
- include,
-)
-from pygments.token import (
- Comment,
- Name,
- Number,
- Punctuation,
- Operator,
- Keyword,
- String,
- Whitespace,
-)
-
-__all__ = ['MesonLexer']
-
-
-class MesonLexer(RegexLexer):
- """
- `meson <https://mesonbuild.com/>`_ language lexer.
- The grammar definition use to transcribe the syntax was retrieved from
- https://mesonbuild.com/Syntax.html#grammar for version 0.58
- Some of those definitions are improperly transcribed so the Meson++
- implementation was also checked: https://github.com/dcbaker/meson-plus-plus
-
- .. versionadded:: 2.10
- """
-
- # TODO String interpolation @VARNAME@ inner matches
- # TODO keyword_arg: value inner matches
-
- name = 'Meson'
- aliases = ['meson', 'meson.build']
- filenames = ['meson.build', 'meson_options.txt']
- mimetypes = ['text/x-meson']
-
- flags = re.MULTILINE | re.UNICODE
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment),
- (r"'''.*'''", String.Single),
- (r'[1-9][0-9]*', Number.Integer),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- include('string'),
- include('keywords'),
- include('expr'),
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
- (r'\s+', Whitespace),
- ],
- 'string': [
- (r"[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}", String),
- (r"'.*?(?<!\\)(\\\\)*?'", String),
- ],
- 'keywords': [
- (words((
- 'if',
- 'elif',
- 'else',
- 'endif',
- 'foreach',
- 'endforeach',
- 'break',
- 'continue',
- ),
- suffix=r'\b'), Keyword),
- ],
- 'expr': [
- (r'(in|and|or|not)\b', Operator.Word),
- (r'(\*=|/=|%=|\+]=|-=|==|!=|\+|-|=)', Operator),
- (r'[\[\]{}:().,?]', Punctuation),
- (words(('true', 'false'), suffix=r'\b'), Keyword.Constant),
- include('builtins'),
- (words((
- 'meson',
- 'build_machine',
- 'host_machine',
- 'target_machine',
- ),
- suffix=r'\b'), Name.Variable.Magic),
- ],
- 'builtins': [
- # This list was extracted from the v0.58 reference manual
- (words((
- 'add_global_arguments',
- 'add_global_link_arguments',
- 'add_languages',
- 'add_project_arguments',
- 'add_project_link_arguments',
- 'add_test_setup',
- 'assert',
- 'benchmark',
- 'both_libraries',
- 'build_target',
- 'configuration_data',
- 'configure_file',
- 'custom_target',
- 'declare_dependency',
- 'dependency',
- 'disabler',
- 'environment',
- 'error',
- 'executable',
- 'files',
- 'find_library',
- 'find_program',
- 'generator',
- 'get_option',
- 'get_variable',
- 'include_directories',
- 'install_data',
- 'install_headers',
- 'install_man',
- 'install_subdir',
- 'is_disabler',
- 'is_variable',
- 'jar',
- 'join_paths',
- 'library',
- 'message',
- 'project',
- 'range',
- 'run_command',
- 'set_variable',
- 'shared_library',
- 'shared_module',
- 'static_library',
- 'subdir',
- 'subdir_done',
- 'subproject',
- 'summary',
- 'test',
- 'vcs_tag',
- 'warning',
- ),
- prefix=r'(?<!\.)',
- suffix=r'\b'), Name.Builtin),
- (r'(?<!\.)import\b', Name.Namespace),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import (
+ RegexLexer,
+ words,
+ include,
+)
+from pygments.token import (
+ Comment,
+ Name,
+ Number,
+ Punctuation,
+ Operator,
+ Keyword,
+ String,
+ Whitespace,
+)
+
+__all__ = ['MesonLexer']
+
+
+class MesonLexer(RegexLexer):
+ """
+ `meson <https://mesonbuild.com/>`_ language lexer.
+    The grammar definition used to transcribe the syntax was retrieved from
+    https://mesonbuild.com/Syntax.html#grammar for version 0.58.
+    Some of those definitions are improperly transcribed, so the Meson++
+ implementation was also checked: https://github.com/dcbaker/meson-plus-plus
+
+ .. versionadded:: 2.10
+ """
+
+ # TODO String interpolation @VARNAME@ inner matches
+ # TODO keyword_arg: value inner matches
+
+ name = 'Meson'
+ aliases = ['meson', 'meson.build']
+ filenames = ['meson.build', 'meson_options.txt']
+ mimetypes = ['text/x-meson']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment),
+ (r"'''.*'''", String.Single),
+ (r'[1-9][0-9]*', Number.Integer),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ include('string'),
+ include('keywords'),
+ include('expr'),
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+ (r'\s+', Whitespace),
+ ],
+ 'string': [
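+            # multiline '''...''' strings first, then ordinary single-quoted
+            # strings (allowing backslash-escaped quotes)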
+ (r"[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}", String),
+ (r"'.*?(?<!\\)(\\\\)*?'", String),
+ ],
+ 'keywords': [
+ (words((
+ 'if',
+ 'elif',
+ 'else',
+ 'endif',
+ 'foreach',
+ 'endforeach',
+ 'break',
+ 'continue',
+ ),
+ suffix=r'\b'), Keyword),
+ ],
+ 'expr': [
+ (r'(in|and|or|not)\b', Operator.Word),
+            (r'(\*=|/=|%=|\+=|-=|==|!=|\+|-|=)', Operator),
+ (r'[\[\]{}:().,?]', Punctuation),
+ (words(('true', 'false'), suffix=r'\b'), Keyword.Constant),
+ include('builtins'),
+ (words((
+ 'meson',
+ 'build_machine',
+ 'host_machine',
+ 'target_machine',
+ ),
+ suffix=r'\b'), Name.Variable.Magic),
+ ],
+ 'builtins': [
+ # This list was extracted from the v0.58 reference manual
+ (words((
+ 'add_global_arguments',
+ 'add_global_link_arguments',
+ 'add_languages',
+ 'add_project_arguments',
+ 'add_project_link_arguments',
+ 'add_test_setup',
+ 'assert',
+ 'benchmark',
+ 'both_libraries',
+ 'build_target',
+ 'configuration_data',
+ 'configure_file',
+ 'custom_target',
+ 'declare_dependency',
+ 'dependency',
+ 'disabler',
+ 'environment',
+ 'error',
+ 'executable',
+ 'files',
+ 'find_library',
+ 'find_program',
+ 'generator',
+ 'get_option',
+ 'get_variable',
+ 'include_directories',
+ 'install_data',
+ 'install_headers',
+ 'install_man',
+ 'install_subdir',
+ 'is_disabler',
+ 'is_variable',
+ 'jar',
+ 'join_paths',
+ 'library',
+ 'message',
+ 'project',
+ 'range',
+ 'run_command',
+ 'set_variable',
+ 'shared_library',
+ 'shared_module',
+ 'static_library',
+ 'subdir',
+ 'subdir_done',
+ 'subproject',
+ 'summary',
+ 'test',
+ 'vcs_tag',
+ 'warning',
+ ),
+ prefix=r'(?<!\.)',
+ suffix=r'\b'), Name.Builtin),
+ (r'(?<!\.)import\b', Name.Namespace),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/mime.py b/contrib/python/Pygments/py3/pygments/lexers/mime.py
index 6dcc81556e..8d28669c00 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/mime.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/mime.py
@@ -1,210 +1,210 @@
-"""
- pygments.lexers.mime
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Multipurpose Internet Mail Extensions (MIME) data.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.lexers import get_lexer_for_mimetype
-from pygments.token import Text, Name, String, Operator, Comment, Other
-from pygments.util import get_int_opt, ClassNotFound
-
-__all__ = ["MIMELexer"]
-
-
-class MIMELexer(RegexLexer):
- """
- Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
- designed to process nested multipart data.
-
- It assumes that the given data contains both header and body (and is
- split at an empty line). If no valid header is found, then the entire data
- will be treated as body.
-
- Additional options accepted:
-
- `MIME-max-level`
- Max recursion level for nested MIME structure. Any negative number
- would treated as unlimited. (default: -1)
-
- `Content-Type`
- Treat the data as a specific content type. Useful when header is
- missing, or this lexer would try to parse from header. (default:
- `text/plain`)
-
- `Multipart-Boundary`
- Set the default multipart boundary delimiter. This option is only used
- when `Content-Type` is `multipart` and header is missing. This lexer
- would try to parse from header by default. (default: None)
-
- `Content-Transfer-Encoding`
- Treat the data as a specific encoding. Or this lexer would try to parse
- from header by default. (default: None)
-
- .. versionadded:: 2.5
- """
-
- name = "MIME"
- aliases = ["mime"]
- mimetypes = ["multipart/mixed",
- "multipart/related",
- "multipart/alternative"]
-
- def __init__(self, **options):
- super().__init__(**options)
- self.boundary = options.get("Multipart-Boundary")
- self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
- self.content_type = options.get("Content_Type", "text/plain")
- self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
-
- def get_header_tokens(self, match):
- field = match.group(1)
-
- if field.lower() in self.attention_headers:
- yield match.start(1), Name.Tag, field + ":"
- yield match.start(2), Text.Whitespace, match.group(2)
-
- pos = match.end(2)
- body = match.group(3)
- for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
- yield pos + i, t, v
-
- else:
- yield match.start(), Comment, match.group()
-
- def get_body_tokens(self, match):
- pos_body_start = match.start()
- entire_body = match.group()
-
- # skip first newline
- if entire_body[0] == '\n':
- yield pos_body_start, Text.Whitespace, '\n'
- pos_body_start = pos_body_start + 1
- entire_body = entire_body[1:]
-
- # if it is not a mulitpart
- if not self.content_type.startswith("multipart") or not self.boundary:
- for i, t, v in self.get_bodypart_tokens(entire_body):
- yield pos_body_start + i, t, v
- return
-
- # find boundary
- bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
- bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
-
- # some data has prefix text before first boundary
- m = bdry_matcher.search(entire_body)
- if m:
- pos_part_start = pos_body_start + m.end()
- pos_iter_start = lpos_end = m.end()
- yield pos_body_start, Text, entire_body[:m.start()]
- yield pos_body_start + lpos_end, String.Delimiter, m.group()
- else:
- pos_part_start = pos_body_start
- pos_iter_start = 0
-
- # process tokens of each body part
- for m in bdry_matcher.finditer(entire_body, pos_iter_start):
- # bodypart
- lpos_start = pos_part_start - pos_body_start
- lpos_end = m.start()
- part = entire_body[lpos_start:lpos_end]
- for i, t, v in self.get_bodypart_tokens(part):
- yield pos_part_start + i, t, v
-
- # boundary
- yield pos_body_start + lpos_end, String.Delimiter, m.group()
- pos_part_start = pos_body_start + m.end()
-
- # some data has suffix text after last boundary
- lpos_start = pos_part_start - pos_body_start
- if lpos_start != len(entire_body):
- yield pos_part_start, Text, entire_body[lpos_start:]
-
- def get_bodypart_tokens(self, text):
- # return if:
- # * no content
- # * no content type specific
- # * content encoding is not readable
- # * max recurrsion exceed
- if not text.strip() or not self.content_type:
- return [(0, Other, text)]
-
- cte = self.content_transfer_encoding
- if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
- return [(0, Other, text)]
-
- if self.max_nested_level == 0:
- return [(0, Other, text)]
-
- # get lexer
- try:
- lexer = get_lexer_for_mimetype(self.content_type)
- except ClassNotFound:
- return [(0, Other, text)]
-
- if isinstance(lexer, type(self)):
- lexer.max_nested_level = self.max_nested_level - 1
-
- return lexer.get_tokens_unprocessed(text)
-
- def store_content_type(self, match):
- self.content_type = match.group(1)
-
- prefix_len = match.start(1) - match.start(0)
- yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
- yield match.start(1), Name.Label, match.group(2)
- yield match.end(2), String.Delimiter, '/'
- yield match.start(3), Name.Label, match.group(3)
-
- def get_content_type_subtokens(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Text.Whitespace, match.group(2)
- yield match.start(3), Name.Attribute, match.group(3)
- yield match.start(4), Operator, match.group(4)
- yield match.start(5), String, match.group(5)
-
- if match.group(3).lower() == "boundary":
- boundary = match.group(5).strip()
- if boundary[0] == '"' and boundary[-1] == '"':
- boundary = boundary[1:-1]
- self.boundary = boundary
-
- def store_content_transfer_encoding(self, match):
- self.content_transfer_encoding = match.group(0).lower()
- yield match.start(0), Name.Constant, match.group(0)
-
- attention_headers = {"content-type", "content-transfer-encoding"}
-
- tokens = {
- "root": [
- (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
- (r"^$[\s\S]+", get_body_tokens),
- ],
- "header": [
- # folding
- (r"\n[ \t]", Text.Whitespace),
- (r"\n(?![ \t])", Text.Whitespace, "#pop"),
- ],
- "content-type": [
- include("header"),
- (
- r"^\s*((multipart|application|audio|font|image|model|text|video"
- r"|message)/([\w-]+))",
- store_content_type,
- ),
- (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
- get_content_type_subtokens),
- (r';[ \t]*\n(?![ \t])', Text, '#pop'),
- ],
- "content-transfer-encoding": [
- include("header"),
- (r"([\w-]+)", store_content_transfer_encoding),
- ],
- }
+"""
+ pygments.lexers.mime
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Multipurpose Internet Mail Extensions (MIME) data.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.lexers import get_lexer_for_mimetype
+from pygments.token import Text, Name, String, Operator, Comment, Other
+from pygments.util import get_int_opt, ClassNotFound
+
+__all__ = ["MIMELexer"]
+
+
+class MIMELexer(RegexLexer):
+ """
+ Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
+ designed to process nested multipart data.
+
+ It assumes that the given data contains both header and body (and is
+ split at an empty line). If no valid header is found, then the entire data
+ will be treated as body.
+
+ Additional options accepted:
+
+ `MIME-max-level`
+        Max recursion level for nested MIME structure. Any negative number
+        is treated as unlimited. (default: -1)
+
+ `Content-Type`
+        Treat the data as a specific content type. Useful when the header is
+        missing; otherwise this lexer tries to parse the content type from the
+        header. (default: `text/plain`)
+
+ `Multipart-Boundary`
+        Set the default multipart boundary delimiter. This option is only used
+        when `Content-Type` is `multipart` and the header is missing; by default
+        the lexer tries to parse the boundary from the header. (default: None)
+
+ `Content-Transfer-Encoding`
+        Treat the data as using a specific transfer encoding; by default this
+        lexer tries to parse the encoding from the header. (default: None)
+
+ .. versionadded:: 2.5
+ """
+
+ name = "MIME"
+ aliases = ["mime"]
+ mimetypes = ["multipart/mixed",
+ "multipart/related",
+ "multipart/alternative"]
+
+ def __init__(self, **options):
+ super().__init__(**options)
+ self.boundary = options.get("Multipart-Boundary")
+ self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
+ self.content_type = options.get("Content_Type", "text/plain")
+ self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
+
+ def get_header_tokens(self, match):
+ field = match.group(1)
+
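+        # only the "attention" headers (Content-Type and
+        # Content-Transfer-Encoding) get detailed tokenization; any other
+        # header field is emitted as a single Comment token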
+ if field.lower() in self.attention_headers:
+ yield match.start(1), Name.Tag, field + ":"
+ yield match.start(2), Text.Whitespace, match.group(2)
+
+ pos = match.end(2)
+ body = match.group(3)
+ for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
+ yield pos + i, t, v
+
+ else:
+ yield match.start(), Comment, match.group()
+
+ def get_body_tokens(self, match):
+ pos_body_start = match.start()
+ entire_body = match.group()
+
+ # skip first newline
+ if entire_body[0] == '\n':
+ yield pos_body_start, Text.Whitespace, '\n'
+ pos_body_start = pos_body_start + 1
+ entire_body = entire_body[1:]
+
+        # if it is not a multipart message
+ if not self.content_type.startswith("multipart") or not self.boundary:
+ for i, t, v in self.get_bodypart_tokens(entire_body):
+ yield pos_body_start + i, t, v
+ return
+
+ # find boundary
+ bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
+ bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
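+        # e.g. with boundary "frontier" this matches the part delimiter
+        # "--frontier\n" as well as the closing delimiter "--frontier--\n"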
+
+ # some data has prefix text before first boundary
+ m = bdry_matcher.search(entire_body)
+ if m:
+ pos_part_start = pos_body_start + m.end()
+ pos_iter_start = lpos_end = m.end()
+ yield pos_body_start, Text, entire_body[:m.start()]
+ yield pos_body_start + lpos_end, String.Delimiter, m.group()
+ else:
+ pos_part_start = pos_body_start
+ pos_iter_start = 0
+
+ # process tokens of each body part
+ for m in bdry_matcher.finditer(entire_body, pos_iter_start):
+ # bodypart
+ lpos_start = pos_part_start - pos_body_start
+ lpos_end = m.start()
+ part = entire_body[lpos_start:lpos_end]
+ for i, t, v in self.get_bodypart_tokens(part):
+ yield pos_part_start + i, t, v
+
+ # boundary
+ yield pos_body_start + lpos_end, String.Delimiter, m.group()
+ pos_part_start = pos_body_start + m.end()
+
+ # some data has suffix text after last boundary
+ lpos_start = pos_part_start - pos_body_start
+ if lpos_start != len(entire_body):
+ yield pos_part_start, Text, entire_body[lpos_start:]
+
+ def get_bodypart_tokens(self, text):
+ # return if:
+ # * no content
+        #  * no content type specified
+        #  * content encoding is not readable
+        #  * max recursion exceeded
+ if not text.strip() or not self.content_type:
+ return [(0, Other, text)]
+
+ cte = self.content_transfer_encoding
+ if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
+ return [(0, Other, text)]
+
+ if self.max_nested_level == 0:
+ return [(0, Other, text)]
+
+ # get lexer
+ try:
+ lexer = get_lexer_for_mimetype(self.content_type)
+ except ClassNotFound:
+ return [(0, Other, text)]
+
+ if isinstance(lexer, type(self)):
+ lexer.max_nested_level = self.max_nested_level - 1
+
+ return lexer.get_tokens_unprocessed(text)
+
+ def store_content_type(self, match):
+ self.content_type = match.group(1)
+
+ prefix_len = match.start(1) - match.start(0)
+ yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
+ yield match.start(1), Name.Label, match.group(2)
+ yield match.end(2), String.Delimiter, '/'
+ yield match.start(3), Name.Label, match.group(3)
+
+ def get_content_type_subtokens(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Text.Whitespace, match.group(2)
+ yield match.start(3), Name.Attribute, match.group(3)
+ yield match.start(4), Operator, match.group(4)
+ yield match.start(5), String, match.group(5)
+
+ if match.group(3).lower() == "boundary":
+ boundary = match.group(5).strip()
+ if boundary[0] == '"' and boundary[-1] == '"':
+ boundary = boundary[1:-1]
+ self.boundary = boundary
+
+ def store_content_transfer_encoding(self, match):
+ self.content_transfer_encoding = match.group(0).lower()
+ yield match.start(0), Name.Constant, match.group(0)
+
+ attention_headers = {"content-type", "content-transfer-encoding"}
+
+ tokens = {
+ "root": [
+ (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
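+        # the body starts at the first empty line; everything after it is
+        # handed to get_body_tokens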
+ (r"^$[\s\S]+", get_body_tokens),
+ ],
+ "header": [
+ # folding
+ (r"\n[ \t]", Text.Whitespace),
+ (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+ ],
+ "content-type": [
+ include("header"),
+ (
+ r"^\s*((multipart|application|audio|font|image|model|text|video"
+ r"|message)/([\w-]+))",
+ store_content_type,
+ ),
+ (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
+ get_content_type_subtokens),
+ (r';[ \t]*\n(?![ \t])', Text, '#pop'),
+ ],
+ "content-transfer-encoding": [
+ include("header"),
+ (r"([\w-]+)", store_content_transfer_encoding),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ml.py b/contrib/python/Pygments/py3/pygments/lexers/ml.py
index 60bd8b9dbc..7b7f47b5f9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ml.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ml.py
@@ -4,7 +4,7 @@
Lexers for ML family languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,7 +14,7 @@ from pygments.lexer import RegexLexer, include, bygroups, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
-__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
+__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
class SMLLexer(RegexLexer):
@@ -29,7 +29,7 @@ class SMLLexer(RegexLexer):
filenames = ['*.sml', '*.sig', '*.fun']
mimetypes = ['text/x-standardml', 'application/x-standardml']
- alphanumid_reserved = {
+ alphanumid_reserved = {
# Core
'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
@@ -38,16 +38,16 @@ class SMLLexer(RegexLexer):
# Modules
'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
'struct', 'structure', 'where',
- }
+ }
- symbolicid_reserved = {
+ symbolicid_reserved = {
# Core
':', r'\|', '=', '=>', '->', '#',
# Modules
':>',
- }
+ }
- nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
+ nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
alphanumid_re = r"[a-zA-Z][\w']*"
symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
@@ -141,7 +141,7 @@ class SMLLexer(RegexLexer):
(r'#\s+(%s)' % symbolicid_re, Name.Label),
# Some reserved words trigger a special, local lexer state change
(r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
- (r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'),
+ (r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'),
(r'\b(functor|include|open|signature|structure)\b(?!\')',
Keyword.Reserved, 'sname'),
(r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
@@ -314,14 +314,14 @@ class SMLLexer(RegexLexer):
'ename': [
include('whitespace'),
- (r'(and\b)(\s+)(%s)' % alphanumid_re,
+ (r'(and\b)(\s+)(%s)' % alphanumid_re,
bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'(and\b)(\s*)(%s)' % symbolicid_re,
+ (r'(and\b)(\s*)(%s)' % symbolicid_re,
bygroups(Keyword.Reserved, Text, Name.Class)),
(r'\b(of)\b(?!\')', Keyword.Reserved),
- (r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class),
+ (r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class),
- default('#pop'),
+ default('#pop'),
],
'datcon': [
@@ -443,7 +443,7 @@ class OcamlLexer(RegexLexer):
],
}
-
+
class OpaLexer(RegexLexer):
"""
Lexer for the Opa language (http://opalang.org).
@@ -765,193 +765,193 @@ class OpaLexer(RegexLexer):
(r'[^\-]+|-', Comment),
],
}
-
-
-class ReasonLexer(RegexLexer):
- """
- For the ReasonML language (https://reasonml.github.io/).
-
- .. versionadded:: 2.6
- """
-
- name = 'ReasonML'
- aliases = ['reasonml', 'reason']
- filenames = ['*.re', '*.rei']
- mimetypes = ['text/x-reasonml']
-
- keywords = (
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
- 'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
- 'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
- 'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
- 'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'val', 'virtual', 'when', 'while', 'with',
- )
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'//.*?\n', Comment.Single),
- (r'\/\*(?!/)', Comment.Multiline, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword),
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'\/\*', Comment.Multiline, '#push'),
- (r'\*\/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class FStarLexer(RegexLexer):
- """
- For the F* language (https://www.fstar-lang.org/).
- .. versionadded:: 2.7
- """
-
- name = 'FStar'
- aliases = ['fstar']
- filenames = ['*.fst', '*.fsti']
- mimetypes = ['text/x-fstar']
-
- keywords = (
- 'abstract', 'attributes', 'noeq', 'unopteq', 'and'
- 'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
- 'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
- 'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
- 'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
- 'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
- 'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
- 'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
- 'val', 'when', 'with', 'not'
- )
- decl_keywords = ('let', 'rec')
- assume_keywords = ('assume', 'admit', 'assert', 'calc')
- keyopts = (
- r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
- r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
- r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
- r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
- r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'^\/\/.+$', Comment),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
- (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
- (r"\`([\w\'.]+)\`", Operator.Word), # for infix applications
- (r"\`", Keyword), # for quoting
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
+
+
+class ReasonLexer(RegexLexer):
+ """
+ For the ReasonML language (https://reasonml.github.io/).
+
+ .. versionadded:: 2.6
+ """
+
+ name = 'ReasonML'
+ aliases = ['reasonml', 'reason']
+ filenames = ['*.re', '*.rei']
+ mimetypes = ['text/x-reasonml']
+
+ keywords = (
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
+ 'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
+ 'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
+ 'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
+ 'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ 'type', 'val', 'virtual', 'when', 'while', 'with',
+ )
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+ r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+ '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
+ )
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbr]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'//.*?\n', Comment.Single),
+ (r'\/\*(?!/)', Comment.Multiline, 'comment'),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'\d[\d_]*', Number.Integer),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword),
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'\/\*', Comment.Multiline, '#push'),
+ (r'\*\/', Comment.Multiline, '#pop'),
+ (r'\*', Comment.Multiline),
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ include('escape-sequence'),
+ (r'\\\n', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class FStarLexer(RegexLexer):
+ """
+ For the F* language (https://www.fstar-lang.org/).
+ .. versionadded:: 2.7
+ """
+
+ name = 'FStar'
+ aliases = ['fstar']
+ filenames = ['*.fst', '*.fsti']
+ mimetypes = ['text/x-fstar']
+
+ keywords = (
+        'abstract', 'attributes', 'noeq', 'unopteq', 'and',
+ 'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
+ 'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
+ 'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
+ 'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
+ 'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
+ 'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
+ 'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
+ 'val', 'when', 'with', 'not'
+ )
+ decl_keywords = ('let', 'rec')
+ assume_keywords = ('assume', 'admit', 'assert', 'calc')
+ keyopts = (
+ r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
+ r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
+ r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
+ r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
+ r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
+ )
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbr]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
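+            # OCaml-style (* ... *) block comments; nesting is handled in the
+            # 'comment' state below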
+ (r'\(\*(?![)])', Comment, 'comment'),
+ (r'^\/\/.+$', Comment),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
+ (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'\d[\d_]*', Number.Integer),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+ (r"\`([\w\'.]+)\`", Operator.Word), # for infix applications
+ (r"\`", Keyword), # for quoting
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ include('escape-sequence'),
+ (r'\\\n', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/modeling.py b/contrib/python/Pygments/py3/pygments/lexers/modeling.py
index b00a7f10b3..2d076bf72e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/modeling.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/modeling.py
@@ -4,7 +4,7 @@
Lexers for modeling languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -37,7 +37,7 @@ class ModelicaLexer(RegexLexer):
tokens = {
'whitespace': [
- (r'[\s\ufeff]+', Text),
+ (r'[\s\ufeff]+', Text),
(r'//[^\n]*\n?', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline)
],
@@ -61,8 +61,8 @@ class ModelicaLexer(RegexLexer):
r'transpose|vector|zeros)\b', Name.Builtin),
(r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
- r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
- r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
+ r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
+ r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
r'output|parameter|partial|protected|public|pure|redeclare|'
r'replaceable|return|stream|then|when|while)\b',
Keyword.Reserved),
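The Modelica hunk only re-wraps the keyword alternation, but it is a convenient point to show how such a lexer is normally consumed through the high-level API. A small sketch, with a made-up Modelica fragment:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.modeling import ModelicaLexer

    source = "model Pendulum\n  parameter Real L = 1;\nequation\n  der(phi) = omega;\nend Pendulum;\n"
    # noclasses=True inlines the styles so the output is self-contained HTML
    print(highlight(source, ModelicaLexer(), HtmlFormatter(noclasses=True)))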
diff --git a/contrib/python/Pygments/py3/pygments/lexers/modula2.py b/contrib/python/Pygments/py3/pygments/lexers/modula2.py
index cad2f4fd40..b4d98c03cb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/modula2.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/modula2.py
@@ -4,7 +4,7 @@
Multi-Dialect Lexer for Modula-2.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -227,8 +227,8 @@ class Modula2Lexer(RegexLexer):
(r'[0-9A-F]+H', Number.Hex),
],
'string_literals': [
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
],
'digraph_operators': [
# Dot Product Operator
@@ -1546,34 +1546,34 @@ class Modula2Lexer(RegexLexer):
# substitute lexemes when in Algol mode
if self.algol_publication_mode:
if value == '#':
- value = '≠'
+ value = '≠'
elif value == '<=':
- value = '≤'
+ value = '≤'
elif value == '>=':
- value = '≥'
+ value = '≥'
elif value == '==':
- value = '≡'
+ value = '≡'
elif value == '*.':
- value = '•'
+ value = '•'
# return result
yield index, token, value
-
- def analyse_text(text):
- """It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
- instead."""
-
- # Check if this looks like Pascal, if not, bail out early
- if not ('(*' in text and '*)' in text and ':=' in text):
- return
-
- result = 0
- # Procedure is in Modula2
- if re.search(r'\bPROCEDURE\b', text):
- result += 0.6
-
- # FUNCTION is only valid in Pascal, but not in Modula2
- if re.search(r'\bFUNCTION\b', text):
- result = 0.0
-
- return result
+
+ def analyse_text(text):
+ """It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
+ instead."""
+
+ # Check if this looks like Pascal, if not, bail out early
+ if not ('(*' in text and '*)' in text and ':=' in text):
+ return
+
+ result = 0
+ # Procedure is in Modula2
+ if re.search(r'\bPROCEDURE\b', text):
+ result += 0.6
+
+ # FUNCTION is only valid in Pascal, but not in Modula2
+ if re.search(r'\bFUNCTION\b', text):
+ result = 0.0
+
+ return result
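The Modula-2 hunk touches two mechanisms: the Algol publication-mode substitution (rewriting '#', '<=', '>=', '==' and '*.' to '≠', '≤', '≥', '≡' and '•' in get_tokens_unprocessed) and the analyse_text heuristic that separates Modula-2 from Pascal via PROCEDURE vs. FUNCTION. A sketch of the substitution as seen from the outside; passing algol_publication_mode as a constructor option is an assumption based on the attribute name used above:

    from pygments.lexers.modula2 import Modula2Lexer

    # assumption: the boolean option carries the same name as self.algol_publication_mode
    lexer = Modula2Lexer(algol_publication_mode=True)
    for _, value in lexer.get_tokens("IF a # b THEN x := x + 1 END"):
        print(repr(value))   # the '#' lexeme should come back as the publication-mode '≠'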
diff --git a/contrib/python/Pygments/py3/pygments/lexers/monte.py b/contrib/python/Pygments/py3/pygments/lexers/monte.py
index 4cd83241dd..6ef240ac51 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/monte.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/monte.py
@@ -4,7 +4,7 @@
Lexer for the Monte programming language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/mosel.py b/contrib/python/Pygments/py3/pygments/lexers/mosel.py
index 62cb3b4c16..5ef6fba27d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/mosel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/mosel.py
@@ -1,447 +1,447 @@
-"""
- pygments.lexers.mosel
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the mosel language.
- http://www.fico.com/en/products/fico-xpress-optimization
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['MoselLexer']
-
-FUNCTIONS = (
- # core functions
- '_',
- 'abs',
- 'arctan',
- 'asproc',
- 'assert',
- 'bitflip',
- 'bitneg',
- 'bitset',
- 'bitshift',
- 'bittest',
- 'bitval',
- 'ceil',
- 'cos',
- 'create',
- 'currentdate',
- 'currenttime',
- 'cutelt',
- 'cutfirst',
- 'cuthead',
- 'cutlast',
- 'cuttail',
- 'datablock',
- 'delcell',
- 'exists',
- 'exit',
- 'exp',
- 'exportprob',
- 'fclose',
- 'fflush',
- 'finalize',
- 'findfirst',
- 'findlast',
- 'floor',
- 'fopen',
- 'fselect',
- 'fskipline',
- 'fwrite',
- 'fwrite_',
- 'fwriteln',
- 'fwriteln_',
- 'getact',
- 'getcoeff',
- 'getcoeffs',
- 'getdual',
- 'getelt',
- 'getfid',
- 'getfirst',
- 'getfname',
- 'gethead',
- 'getlast',
- 'getobjval',
- 'getparam',
- 'getrcost',
- 'getreadcnt',
- 'getreverse',
- 'getsize',
- 'getslack',
- 'getsol',
- 'gettail',
- 'gettype',
- 'getvars',
- 'isdynamic',
- 'iseof',
- 'isfinite',
- 'ishidden',
- 'isinf',
- 'isnan',
- 'isodd',
- 'ln',
- 'localsetparam',
- 'log',
- 'makesos1',
- 'makesos2',
- 'maxlist',
- 'memoryuse',
- 'minlist',
- 'newmuid',
- 'publish',
- 'random',
- 'read',
- 'readln',
- 'reset',
- 'restoreparam',
- 'reverse',
- 'round',
- 'setcoeff',
- 'sethidden',
- 'setioerr',
- 'setmatherr',
- 'setname',
- 'setparam',
- 'setrandseed',
- 'setrange',
- 'settype',
- 'sin',
- 'splithead',
- 'splittail',
- 'sqrt',
- 'strfmt',
- 'substr',
- 'timestamp',
- 'unpublish',
- 'versionnum',
- 'versionstr',
- 'write',
- 'write_',
- 'writeln',
- 'writeln_',
-
- # mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
- 'addcut',
- 'addcuts',
- 'addmipsol',
- 'basisstability',
- 'calcsolinfo',
- 'clearmipdir',
- 'clearmodcut',
- 'command',
- 'copysoltoinit',
- 'crossoverlpsol',
- 'defdelayedrows',
- 'defsecurevecs',
- 'delcuts',
- 'dropcuts',
- 'estimatemarginals',
- 'fixglobal',
- 'flushmsgq',
- 'getbstat',
- 'getcnlist',
- 'getcplist',
- 'getdualray',
- 'getiis',
- 'getiissense',
- 'getiistype',
- 'getinfcause',
- 'getinfeas',
- 'getlb',
- 'getlct',
- 'getleft',
- 'getloadedlinctrs',
- 'getloadedmpvars',
- 'getname',
- 'getprimalray',
- 'getprobstat',
- 'getrange',
- 'getright',
- 'getsensrng',
- 'getsize',
- 'getsol',
- 'gettype',
- 'getub',
- 'getvars',
- 'gety',
- 'hasfeature',
- 'implies',
- 'indicator',
- 'initglobal',
- 'ishidden',
- 'isiisvalid',
- 'isintegral',
- 'loadbasis',
- 'loadcuts',
- 'loadlpsol',
- 'loadmipsol',
- 'loadprob',
- 'maximise',
- 'maximize',
- 'minimise',
- 'minimize',
- 'postsolve',
- 'readbasis',
- 'readdirs',
- 'readsol',
- 'refinemipsol',
- 'rejectintsol',
- 'repairinfeas',
- 'repairinfeas_deprec',
- 'resetbasis',
- 'resetiis',
- 'resetsol',
- 'savebasis',
- 'savemipsol',
- 'savesol',
- 'savestate',
- 'selectsol',
- 'setarchconsistency',
- 'setbstat',
- 'setcallback',
- 'setcbcutoff',
- 'setgndata',
- 'sethidden',
- 'setlb',
- 'setmipdir',
- 'setmodcut',
- 'setsol',
- 'setub',
- 'setucbdata',
- 'stopoptimise',
- 'stopoptimize',
- 'storecut',
- 'storecuts',
- 'unloadprob',
- 'uselastbarsol',
- 'writebasis',
- 'writedirs',
- 'writeprob',
- 'writesol',
- 'xor',
- 'xprs_addctr',
- 'xprs_addindic',
-
- # mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
- 'addmonths',
- 'copytext',
- 'cuttext',
- 'deltext',
- 'endswith',
- 'erase',
- 'expandpath',
- 'fcopy',
- 'fdelete',
- 'findfiles',
- 'findtext',
- 'fmove',
- 'formattext',
- 'getasnumber',
- 'getchar',
- 'getcwd',
- 'getdate',
- 'getday',
- 'getdaynum',
- 'getdays',
- 'getdirsep',
- 'getdsoparam',
- 'getendparse',
- 'getenv',
- 'getfsize',
- 'getfstat',
- 'getftime',
- 'gethour',
- 'getminute',
- 'getmonth',
- 'getmsec',
- 'getoserrmsg',
- 'getoserror',
- 'getpathsep',
- 'getqtype',
- 'getsecond',
- 'getsepchar',
- 'getsize',
- 'getstart',
- 'getsucc',
- 'getsysinfo',
- 'getsysstat',
- 'gettime',
- 'gettmpdir',
- 'gettrim',
- 'getweekday',
- 'getyear',
- 'inserttext',
- 'isvalid',
- 'jointext',
- 'makedir',
- 'makepath',
- 'newtar',
- 'newzip',
- 'nextfield',
- 'openpipe',
- 'parseextn',
- 'parseint',
- 'parsereal',
- 'parsetext',
- 'pastetext',
- 'pathmatch',
- 'pathsplit',
- 'qsort',
- 'quote',
- 'readtextline',
- 'regmatch',
- 'regreplace',
- 'removedir',
- 'removefiles',
- 'setchar',
- 'setdate',
- 'setday',
- 'setdsoparam',
- 'setendparse',
- 'setenv',
- 'sethour',
- 'setminute',
- 'setmonth',
- 'setmsec',
- 'setoserror',
- 'setqtype',
- 'setsecond',
- 'setsepchar',
- 'setstart',
- 'setsucc',
- 'settime',
- 'settrim',
- 'setyear',
- 'sleep',
- 'splittext',
- 'startswith',
- 'system',
- 'tarlist',
- 'textfmt',
- 'tolower',
- 'toupper',
- 'trim',
- 'untar',
- 'unzip',
- 'ziplist',
-
- # mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
- 'canceltimer',
- 'clearaliases',
- 'compile',
- 'connect',
- 'detach',
- 'disconnect',
- 'dropnextevent',
- 'findxsrvs',
- 'getaliases',
- 'getannidents',
- 'getannotations',
- 'getbanner',
- 'getclass',
- 'getdsoprop',
- 'getdsopropnum',
- 'getexitcode',
- 'getfromgid',
- 'getfromid',
- 'getfromuid',
- 'getgid',
- 'gethostalias',
- 'getid',
- 'getmodprop',
- 'getmodpropnum',
- 'getnextevent',
- 'getnode',
- 'getrmtid',
- 'getstatus',
- 'getsysinfo',
- 'gettimer',
- 'getuid',
- 'getvalue',
- 'isqueueempty',
- 'load',
- 'nullevent',
- 'peeknextevent',
- 'resetmodpar',
- 'run',
- 'send',
- 'setcontrol',
- 'setdefstream',
- 'setgid',
- 'sethostalias',
- 'setmodpar',
- 'settimer',
- 'setuid',
- 'setworkdir',
- 'stop',
- 'unload',
- 'wait',
- 'waitexpired',
- 'waitfor',
- 'waitforend',
-)
-
-
-class MoselLexer(RegexLexer):
- """
- For the Mosel optimization language.
-
- .. versionadded:: 2.6
- """
- name = 'Mosel'
- aliases = ['mosel']
- filenames = ['*.mos']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text.Whitespace),
- (r'!.*?\n', Comment.Single),
- (r'\(!(.|\n)*?!\)', Comment.Multiline),
- (words((
- 'and', 'as', 'break', 'case', 'count', 'declarations', 'do',
- 'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false',
- 'forall', 'forward', 'from', 'function', 'hashmap', 'if',
- 'imports', 'include', 'initialisations', 'initializations', 'inter',
- 'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup',
- 'nssearch', 'of', 'options', 'or', 'package', 'parameters',
- 'procedure', 'public', 'prod', 'record', 'repeat', 'requirements',
- 'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses',
- 'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'),
- Keyword.Builtin),
- (words((
- 'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr',
- 'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time',
- 'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter',
- 'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary',
- 'is_continuous', 'is_free', 'is_semcont', 'is_semint',
- 'is_partint'), prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)',
- Operator),
- (r'[()\[\]{},;]+', Punctuation),
- (words(FUNCTIONS, prefix=r'\b', suffix=r'\b'), Name.Function),
- (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
- (r'\d+([eE][+-]?\d+)?', Number.Integer),
- (r'[+-]?Infinity', Number.Integer),
- (r'0[xX][0-9a-fA-F]+', Number),
- (r'"', String.Double, 'double_quote'),
- (r'\'', String.Single, 'single_quote'),
- (r'(\w+|(\.(?!\.)))', Text),
- ],
- 'single_quote': [
- (r'\'', String.Single, '#pop'),
- (r'[^\']+', String.Single),
- ],
- 'double_quote': [
- (r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape),
- (r'\"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- ],
- }
+"""
+ pygments.lexers.mosel
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the mosel language.
+ http://www.fico.com/en/products/fico-xpress-optimization
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['MoselLexer']
+
+FUNCTIONS = (
+ # core functions
+ '_',
+ 'abs',
+ 'arctan',
+ 'asproc',
+ 'assert',
+ 'bitflip',
+ 'bitneg',
+ 'bitset',
+ 'bitshift',
+ 'bittest',
+ 'bitval',
+ 'ceil',
+ 'cos',
+ 'create',
+ 'currentdate',
+ 'currenttime',
+ 'cutelt',
+ 'cutfirst',
+ 'cuthead',
+ 'cutlast',
+ 'cuttail',
+ 'datablock',
+ 'delcell',
+ 'exists',
+ 'exit',
+ 'exp',
+ 'exportprob',
+ 'fclose',
+ 'fflush',
+ 'finalize',
+ 'findfirst',
+ 'findlast',
+ 'floor',
+ 'fopen',
+ 'fselect',
+ 'fskipline',
+ 'fwrite',
+ 'fwrite_',
+ 'fwriteln',
+ 'fwriteln_',
+ 'getact',
+ 'getcoeff',
+ 'getcoeffs',
+ 'getdual',
+ 'getelt',
+ 'getfid',
+ 'getfirst',
+ 'getfname',
+ 'gethead',
+ 'getlast',
+ 'getobjval',
+ 'getparam',
+ 'getrcost',
+ 'getreadcnt',
+ 'getreverse',
+ 'getsize',
+ 'getslack',
+ 'getsol',
+ 'gettail',
+ 'gettype',
+ 'getvars',
+ 'isdynamic',
+ 'iseof',
+ 'isfinite',
+ 'ishidden',
+ 'isinf',
+ 'isnan',
+ 'isodd',
+ 'ln',
+ 'localsetparam',
+ 'log',
+ 'makesos1',
+ 'makesos2',
+ 'maxlist',
+ 'memoryuse',
+ 'minlist',
+ 'newmuid',
+ 'publish',
+ 'random',
+ 'read',
+ 'readln',
+ 'reset',
+ 'restoreparam',
+ 'reverse',
+ 'round',
+ 'setcoeff',
+ 'sethidden',
+ 'setioerr',
+ 'setmatherr',
+ 'setname',
+ 'setparam',
+ 'setrandseed',
+ 'setrange',
+ 'settype',
+ 'sin',
+ 'splithead',
+ 'splittail',
+ 'sqrt',
+ 'strfmt',
+ 'substr',
+ 'timestamp',
+ 'unpublish',
+ 'versionnum',
+ 'versionstr',
+ 'write',
+ 'write_',
+ 'writeln',
+ 'writeln_',
+
+ # mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
+ 'addcut',
+ 'addcuts',
+ 'addmipsol',
+ 'basisstability',
+ 'calcsolinfo',
+ 'clearmipdir',
+ 'clearmodcut',
+ 'command',
+ 'copysoltoinit',
+ 'crossoverlpsol',
+ 'defdelayedrows',
+ 'defsecurevecs',
+ 'delcuts',
+ 'dropcuts',
+ 'estimatemarginals',
+ 'fixglobal',
+ 'flushmsgq',
+ 'getbstat',
+ 'getcnlist',
+ 'getcplist',
+ 'getdualray',
+ 'getiis',
+ 'getiissense',
+ 'getiistype',
+ 'getinfcause',
+ 'getinfeas',
+ 'getlb',
+ 'getlct',
+ 'getleft',
+ 'getloadedlinctrs',
+ 'getloadedmpvars',
+ 'getname',
+ 'getprimalray',
+ 'getprobstat',
+ 'getrange',
+ 'getright',
+ 'getsensrng',
+ 'getsize',
+ 'getsol',
+ 'gettype',
+ 'getub',
+ 'getvars',
+ 'gety',
+ 'hasfeature',
+ 'implies',
+ 'indicator',
+ 'initglobal',
+ 'ishidden',
+ 'isiisvalid',
+ 'isintegral',
+ 'loadbasis',
+ 'loadcuts',
+ 'loadlpsol',
+ 'loadmipsol',
+ 'loadprob',
+ 'maximise',
+ 'maximize',
+ 'minimise',
+ 'minimize',
+ 'postsolve',
+ 'readbasis',
+ 'readdirs',
+ 'readsol',
+ 'refinemipsol',
+ 'rejectintsol',
+ 'repairinfeas',
+ 'repairinfeas_deprec',
+ 'resetbasis',
+ 'resetiis',
+ 'resetsol',
+ 'savebasis',
+ 'savemipsol',
+ 'savesol',
+ 'savestate',
+ 'selectsol',
+ 'setarchconsistency',
+ 'setbstat',
+ 'setcallback',
+ 'setcbcutoff',
+ 'setgndata',
+ 'sethidden',
+ 'setlb',
+ 'setmipdir',
+ 'setmodcut',
+ 'setsol',
+ 'setub',
+ 'setucbdata',
+ 'stopoptimise',
+ 'stopoptimize',
+ 'storecut',
+ 'storecuts',
+ 'unloadprob',
+ 'uselastbarsol',
+ 'writebasis',
+ 'writedirs',
+ 'writeprob',
+ 'writesol',
+ 'xor',
+ 'xprs_addctr',
+ 'xprs_addindic',
+
+ # mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
+ 'addmonths',
+ 'copytext',
+ 'cuttext',
+ 'deltext',
+ 'endswith',
+ 'erase',
+ 'expandpath',
+ 'fcopy',
+ 'fdelete',
+ 'findfiles',
+ 'findtext',
+ 'fmove',
+ 'formattext',
+ 'getasnumber',
+ 'getchar',
+ 'getcwd',
+ 'getdate',
+ 'getday',
+ 'getdaynum',
+ 'getdays',
+ 'getdirsep',
+ 'getdsoparam',
+ 'getendparse',
+ 'getenv',
+ 'getfsize',
+ 'getfstat',
+ 'getftime',
+ 'gethour',
+ 'getminute',
+ 'getmonth',
+ 'getmsec',
+ 'getoserrmsg',
+ 'getoserror',
+ 'getpathsep',
+ 'getqtype',
+ 'getsecond',
+ 'getsepchar',
+ 'getsize',
+ 'getstart',
+ 'getsucc',
+ 'getsysinfo',
+ 'getsysstat',
+ 'gettime',
+ 'gettmpdir',
+ 'gettrim',
+ 'getweekday',
+ 'getyear',
+ 'inserttext',
+ 'isvalid',
+ 'jointext',
+ 'makedir',
+ 'makepath',
+ 'newtar',
+ 'newzip',
+ 'nextfield',
+ 'openpipe',
+ 'parseextn',
+ 'parseint',
+ 'parsereal',
+ 'parsetext',
+ 'pastetext',
+ 'pathmatch',
+ 'pathsplit',
+ 'qsort',
+ 'quote',
+ 'readtextline',
+ 'regmatch',
+ 'regreplace',
+ 'removedir',
+ 'removefiles',
+ 'setchar',
+ 'setdate',
+ 'setday',
+ 'setdsoparam',
+ 'setendparse',
+ 'setenv',
+ 'sethour',
+ 'setminute',
+ 'setmonth',
+ 'setmsec',
+ 'setoserror',
+ 'setqtype',
+ 'setsecond',
+ 'setsepchar',
+ 'setstart',
+ 'setsucc',
+ 'settime',
+ 'settrim',
+ 'setyear',
+ 'sleep',
+ 'splittext',
+ 'startswith',
+ 'system',
+ 'tarlist',
+ 'textfmt',
+ 'tolower',
+ 'toupper',
+ 'trim',
+ 'untar',
+ 'unzip',
+ 'ziplist',
+
+ # mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
+ 'canceltimer',
+ 'clearaliases',
+ 'compile',
+ 'connect',
+ 'detach',
+ 'disconnect',
+ 'dropnextevent',
+ 'findxsrvs',
+ 'getaliases',
+ 'getannidents',
+ 'getannotations',
+ 'getbanner',
+ 'getclass',
+ 'getdsoprop',
+ 'getdsopropnum',
+ 'getexitcode',
+ 'getfromgid',
+ 'getfromid',
+ 'getfromuid',
+ 'getgid',
+ 'gethostalias',
+ 'getid',
+ 'getmodprop',
+ 'getmodpropnum',
+ 'getnextevent',
+ 'getnode',
+ 'getrmtid',
+ 'getstatus',
+ 'getsysinfo',
+ 'gettimer',
+ 'getuid',
+ 'getvalue',
+ 'isqueueempty',
+ 'load',
+ 'nullevent',
+ 'peeknextevent',
+ 'resetmodpar',
+ 'run',
+ 'send',
+ 'setcontrol',
+ 'setdefstream',
+ 'setgid',
+ 'sethostalias',
+ 'setmodpar',
+ 'settimer',
+ 'setuid',
+ 'setworkdir',
+ 'stop',
+ 'unload',
+ 'wait',
+ 'waitexpired',
+ 'waitfor',
+ 'waitforend',
+)
+
+
+class MoselLexer(RegexLexer):
+ """
+ For the Mosel optimization language.
+
+ .. versionadded:: 2.6
+ """
+ name = 'Mosel'
+ aliases = ['mosel']
+ filenames = ['*.mos']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text.Whitespace),
+ (r'!.*?\n', Comment.Single),
+ (r'\(!(.|\n)*?!\)', Comment.Multiline),
+ (words((
+ 'and', 'as', 'break', 'case', 'count', 'declarations', 'do',
+ 'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false',
+ 'forall', 'forward', 'from', 'function', 'hashmap', 'if',
+ 'imports', 'include', 'initialisations', 'initializations', 'inter',
+ 'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup',
+ 'nssearch', 'of', 'options', 'or', 'package', 'parameters',
+ 'procedure', 'public', 'prod', 'record', 'repeat', 'requirements',
+ 'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses',
+ 'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Builtin),
+ (words((
+ 'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr',
+ 'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time',
+ 'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter',
+ 'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary',
+ 'is_continuous', 'is_free', 'is_semcont', 'is_semint',
+ 'is_partint'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Type),
+ (r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)',
+ Operator),
+ (r'[()\[\]{},;]+', Punctuation),
+ (words(FUNCTIONS, prefix=r'\b', suffix=r'\b'), Name.Function),
+ (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
+ (r'\d+([eE][+-]?\d+)?', Number.Integer),
+ (r'[+-]?Infinity', Number.Integer),
+ (r'0[xX][0-9a-fA-F]+', Number),
+ (r'"', String.Double, 'double_quote'),
+ (r'\'', String.Single, 'single_quote'),
+ (r'(\w+|(\.(?!\.)))', Text),
+ ],
+ 'single_quote': [
+ (r'\'', String.Single, '#pop'),
+ (r'[^\']+', String.Single),
+ ],
+ 'double_quote': [
+ (r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape),
+ (r'\"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ ],
+ }
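The restored MoselLexer feeds its long FUNCTIONS tuple through pygments.lexer.words, which compiles the list into a single word-boundary-delimited alternation, so every builtin name becomes one regex branch. A short sketch tokenizing a made-up Mosel model and picking out those function hits:

    from pygments.lexers.mosel import MoselLexer
    from pygments.token import Name

    src = 'model "demo"\n  writeln("objective = ", getobjval)\nend-model\n'
    for ttype, value in MoselLexer().get_tokens(src):
        if ttype in Name.Function:     # 'writeln' and 'getobjval' come from FUNCTIONS
            print(value)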
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ncl.py b/contrib/python/Pygments/py3/pygments/lexers/ncl.py
index f9df40bdeb..6753393f9d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ncl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ncl.py
@@ -4,7 +4,7 @@
Lexers for NCAR Command Language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nimrod.py b/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
index ce6ba87537..b9d06d14ea 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
@@ -4,7 +4,7 @@
Lexer for the Nim language (formerly known as Nimrod).
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class NimrodLexer(RegexLexer):
"""
name = 'Nimrod'
- aliases = ['nimrod', 'nim']
+ aliases = ['nimrod', 'nim']
filenames = ['*.nim', '*.nimrod']
mimetypes = ['text/x-nim']
@@ -42,14 +42,14 @@ class NimrodLexer(RegexLexer):
return "|".join(newWords)
keywords = [
- 'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case',
+ 'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case',
'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
'export', 'finally', 'for', 'func', 'if', 'in', 'yield', 'interface',
'is', 'isnot', 'iterator', 'let', 'macro', 'method', 'mixin', 'mod',
'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise',
- 'ref', 'return', 'shl', 'shr', 'static', 'template', 'try',
- 'tuple', 'type', 'using', 'when', 'while', 'xor'
+ 'ref', 'return', 'shl', 'shr', 'static', 'template', 'try',
+ 'tuple', 'type', 'using', 'when', 'while', 'xor'
]
keywordsPseudo = [
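The Nim hunk re-wraps the keyword list that is later run through the lexer's helper returning "|".join(newWords), i.e. a regex alternation in which each keyword matches Nim's style-insensitive identifier rules. The helper's body is not visible in this hunk, so the following is a from-scratch sketch of the idea rather than the Pygments implementation:

    import re

    def style_insensitive(word):
        # Nim compares identifiers ignoring case (except the first letter) and
        # underscores; build one alternation branch accepting those spellings.
        head, tail = word[0], word[1:]
        return head + ''.join('_?[%s%s]' % (ch.lower(), ch.upper()) for ch in tail)

    pattern = re.compile(r'\b(%s)\b' % '|'.join(style_insensitive(w) for w in ('proc', 'iterator')))
    print([bool(pattern.match(s)) for s in ('proc', 'p_roc', 'pRoc', 'Proc')])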
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nit.py b/contrib/python/Pygments/py3/pygments/lexers/nit.py
index 7a15d78f99..3c30037d17 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nit.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nit.py
@@ -4,7 +4,7 @@
Lexer for the Nit language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nix.py b/contrib/python/Pygments/py3/pygments/lexers/nix.py
index bd7afe7440..783f17032a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nix.py
@@ -4,7 +4,7 @@
Lexers for the NixOS Nix language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/oberon.py b/contrib/python/Pygments/py3/pygments/lexers/oberon.py
index 7010e910b5..5868e8fe35 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/oberon.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/oberon.py
@@ -4,7 +4,7 @@
Lexers for Oberon family languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -102,19 +102,19 @@ class ComponentPascalLexer(RegexLexer):
(r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
]
}
-
- def analyse_text(text):
- """The only other lexer using .cp is the C++ one, so we check if for
- a few common Pascal keywords here. Those are unfortunately quite
- common across various business languages as well."""
- result = 0
- if 'BEGIN' in text:
- result += 0.01
- if 'END' in text:
- result += 0.01
- if 'PROCEDURE' in text:
- result += 0.01
- if 'END' in text:
- result += 0.01
-
- return result
+
+ def analyse_text(text):
+        """The only other lexer using .cp is the C++ one, so we check it for
+ a few common Pascal keywords here. Those are unfortunately quite
+ common across various business languages as well."""
+ result = 0
+ if 'BEGIN' in text:
+ result += 0.01
+ if 'END' in text:
+ result += 0.01
+ if 'PROCEDURE' in text:
+ result += 0.01
+ if 'END' in text:
+ result += 0.01
+
+ return result
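The analyse_text restored above returns a small cumulative score (0.01 per matched keyword); when no filename is available, pygments.lexers.guess_lexer collects such scores from every registered lexer and instantiates the best match. A brief sketch with an invented Component Pascal fragment:

    from pygments.lexers import guess_lexer
    from pygments.lexers.oberon import ComponentPascalLexer

    text = "MODULE Hello;\nPROCEDURE Greet*;\nBEGIN\nEND Greet;\nEND Hello.\n"
    print(ComponentPascalLexer.analyse_text(text))   # sum of the 0.01 increments above
    print(guess_lexer(text))                         # whichever registered lexer scored highest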
diff --git a/contrib/python/Pygments/py3/pygments/lexers/objective.py b/contrib/python/Pygments/py3/pygments/lexers/objective.py
index a4cc44b387..bd06853361 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/objective.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/objective.py
@@ -4,7 +4,7 @@
Lexers for Objective-C family languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -260,11 +260,11 @@ class LogosLexer(ObjectiveCppLexer):
'logos_classname'),
(r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
bygroups(Keyword, Text, Name.Class)),
- (r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)',
+ (r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)',
bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
(r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
'function'),
- (r'(%new)(\s*)(\()(.*?)(\))',
+ (r'(%new)(\s*)(\()(.*?)(\))',
bygroups(Keyword, Text, Keyword, String, Keyword)),
(r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
inherit,
@@ -412,7 +412,7 @@ class SwiftLexer(RegexLexer):
],
'keywords': [
(words((
- 'as', 'async', 'await', 'break', 'case', 'catch', 'continue', 'default', 'defer',
+ 'as', 'async', 'await', 'break', 'case', 'catch', 'continue', 'default', 'defer',
'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
'repeat', 'return', '#selector', 'switch', 'throw', 'try',
'where', 'while'), suffix=r'\b'),
@@ -440,8 +440,8 @@ class SwiftLexer(RegexLexer):
(r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
Text, Name.Variable)),
(words((
- 'actor', 'associatedtype', 'class', 'deinit', 'enum', 'extension', 'func', 'import',
- 'init', 'internal', 'let', 'operator', 'private', 'protocol', 'public',
+ 'actor', 'associatedtype', 'class', 'deinit', 'enum', 'extension', 'func', 'import',
+ 'init', 'internal', 'let', 'operator', 'private', 'protocol', 'public',
'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
Keyword.Declaration)
],
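The Swift hunk re-wraps keyword tuples that now include the concurrency keywords async, await and actor. A quick sketch checking that they are emitted under the Keyword hierarchy; the Swift snippet is illustrative only:

    from pygments.lexers.objective import SwiftLexer
    from pygments.token import Keyword

    code = "actor Counter {\n    func next() async -> Int { return 1 }\n}\n"
    for ttype, value in SwiftLexer().get_tokens(code):
        if ttype in Keyword:        # Keyword.Declaration ('actor', 'func') is included too
            print(ttype, value)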
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ooc.py b/contrib/python/Pygments/py3/pygments/lexers/ooc.py
index 0c74cdec4b..1317bba11b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ooc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ooc.py
@@ -4,7 +4,7 @@
Lexers for the Ooc language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/other.py b/contrib/python/Pygments/py3/pygments/lexers/other.py
index b0930088e6..d9566bdd6e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/other.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/other.py
@@ -4,7 +4,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/parasail.py b/contrib/python/Pygments/py3/pygments/lexers/parasail.py
index 49d8d672e1..a3f6443057 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/parasail.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/parasail.py
@@ -4,7 +4,7 @@
Lexer for ParaSail.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/parsers.py b/contrib/python/Pygments/py3/pygments/lexers/parsers.py
index 0009082fc4..2861cdd41a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/parsers.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/parsers.py
@@ -4,7 +4,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -63,10 +63,10 @@ class RagelLexer(RegexLexer):
(r'[+-]?[0-9]+', Number.Integer),
],
'literals': [
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String), # square bracket literals
- (r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex), # regular expressions
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String), # square bracket literals
+ (r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex), # regular expressions
],
'identifiers': [
(r'[a-zA-Z_]\w*', Name.Variable),
@@ -105,15 +105,15 @@ class RagelLexer(RegexLexer):
r'[^\\]\\[{}]', # allow escaped { or }
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
+ r'"(\\\\|\\[^\\]|[^"\\])*"',
+ r"'(\\\\|\\[^\\]|[^'\\])*'",
r'//.*$\n?', # single line comment
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
r'\#.*$\n?', # ruby comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
+ r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
# / is safe now that we've handled regex and javadoc comments
r'/',
@@ -146,12 +146,12 @@ class RagelEmbeddedLexer(RegexLexer):
r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
+ r'"(\\\\|\\[^\\]|[^"\\])*"',
+ r"'(\\\\|\\[^\\]|[^'\\])*'",
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
r'//.*$\n?', # single line comment
r'\#.*$\n?', # ruby/ragel comment
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # regular expression
+ r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # regular expression
# / is safe now that we've handled regex and javadoc comments
r'/',
@@ -181,7 +181,7 @@ class RagelEmbeddedLexer(RegexLexer):
# specifically allow regex followed immediately by *
# so it doesn't get mistaken for a comment
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
+ r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
# allow / as long as it's not followed by another / or by a *
r'/(?=[^/*]|$)',
@@ -192,9 +192,9 @@ class RagelEmbeddedLexer(RegexLexer):
)) + r')+',
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
- r"\[(\\\\|\\[^\\]|[^\]\\])*\]", # square bracket literal
+ r'"(\\\\|\\[^\\]|[^"\\])*"',
+ r"'(\\\\|\\[^\\]|[^'\\])*'",
+ r"\[(\\\\|\\[^\\]|[^\]\\])*\]", # square bracket literal
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
r'//.*$\n?', # single line comment
r'\#.*$\n?', # ruby/ragel comment
@@ -220,7 +220,7 @@ class RagelRubyLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
+ super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: ruby' in text
@@ -238,7 +238,7 @@ class RagelCLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super().__init__(CLexer, RagelEmbeddedLexer, **options)
+ super().__init__(CLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: c' in text
@@ -256,7 +256,7 @@ class RagelDLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super().__init__(DLexer, RagelEmbeddedLexer, **options)
+ super().__init__(DLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: d' in text
@@ -274,7 +274,7 @@ class RagelCppLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super().__init__(CppLexer, RagelEmbeddedLexer, **options)
+ super().__init__(CppLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: c++' in text
@@ -292,7 +292,7 @@ class RagelObjectiveCLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
+ super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: objc' in text
@@ -310,7 +310,7 @@ class RagelJavaLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
+ super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: java' in text
@@ -415,8 +415,8 @@ class AntlrLexer(RegexLexer):
(r':', Punctuation),
# literals
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'<<([^>]|>[^>])>>', String),
# identifiers
# Tokens start with capital letter.
@@ -455,14 +455,14 @@ class AntlrLexer(RegexLexer):
r'[^${}\'"/\\]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
+ r'"(\\\\|\\[^\\]|[^"\\])*"',
+ r"'(\\\\|\\[^\\]|[^'\\])*'",
r'//.*$\n?', # single line comment
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
+ r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
# backslashes are okay, as long as we are not backslashing a %
r'\\(?!%)',
@@ -482,14 +482,14 @@ class AntlrLexer(RegexLexer):
r'[^$\[\]\'"/]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\[^\\]|[^"\\])*"',
- r"'(\\\\|\\[^\\]|[^'\\])*'",
+ r'"(\\\\|\\[^\\]|[^"\\])*"',
+ r"'(\\\\|\\[^\\]|[^'\\])*'",
r'//.*$\n?', # single line comment
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
+ r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
# Now that we've handled regex and javadoc comments
# it's safe to let / through.
@@ -508,7 +508,7 @@ class AntlrLexer(RegexLexer):
def analyse_text(text):
return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-
+
# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
class AntlrCppLexer(DelegatingLexer):
@@ -523,7 +523,7 @@ class AntlrCppLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(CppLexer, AntlrLexer, **options)
+ super().__init__(CppLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -542,7 +542,7 @@ class AntlrObjectiveCLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(ObjectiveCLexer, AntlrLexer, **options)
+ super().__init__(ObjectiveCLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -561,7 +561,7 @@ class AntlrCSharpLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(CSharpLexer, AntlrLexer, **options)
+ super().__init__(CSharpLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -580,7 +580,7 @@ class AntlrPythonLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(PythonLexer, AntlrLexer, **options)
+ super().__init__(PythonLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -599,7 +599,7 @@ class AntlrJavaLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(JavaLexer, AntlrLexer, **options)
+ super().__init__(JavaLexer, AntlrLexer, **options)
def analyse_text(text):
# Antlr language is Java by default
@@ -618,7 +618,7 @@ class AntlrRubyLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(RubyLexer, AntlrLexer, **options)
+ super().__init__(RubyLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -637,7 +637,7 @@ class AntlrPerlLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super().__init__(PerlLexer, AntlrLexer, **options)
+ super().__init__(PerlLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -652,12 +652,12 @@ class AntlrActionScriptLexer(DelegatingLexer):
"""
name = 'ANTLR With ActionScript Target'
- aliases = ['antlr-actionscript', 'antlr-as']
+ aliases = ['antlr-actionscript', 'antlr-as']
filenames = ['*.G', '*.g']
def __init__(self, **options):
from pygments.lexers.actionscript import ActionScriptLexer
- super().__init__(ActionScriptLexer, AntlrLexer, **options)
+ super().__init__(ActionScriptLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -700,8 +700,8 @@ class TreetopBaseLexer(RegexLexer):
'rule': [
include('space'),
include('end'),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
(r'[A-Za-z_]\w*', Name),
(r'[()]', Punctuation),
@@ -745,7 +745,7 @@ class TreetopLexer(DelegatingLexer):
filenames = ['*.treetop', '*.tt']
def __init__(self, **options):
- super().__init__(RubyLexer, TreetopBaseLexer, **options)
+ super().__init__(RubyLexer, TreetopBaseLexer, **options)
class EbnfLexer(RegexLexer):
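Most of the parsers.py hunks are DelegatingLexer subclasses (Ragel*, Antlr*, Treetop) whose __init__ merely hands a host-language lexer plus the grammar-aware lexer to super().__init__(). Wiring up one more target follows the same two-line pattern; RagelGoLexer and its '@LANG: go' marker below are hypothetical and not part of Pygments:

    from pygments.lexer import DelegatingLexer
    from pygments.lexers.go import GoLexer
    from pygments.lexers.parsers import RagelEmbeddedLexer

    class RagelGoLexer(DelegatingLexer):
        """Ragel embedded in a Go host file (illustrative only)."""
        name = 'Ragel in Go Host'
        aliases = ['ragel-go']
        filenames = ['*.rl']

        def __init__(self, **options):
            # host-language (root) lexer first, then the Ragel-aware lexer,
            # mirroring RagelRubyLexer and friends above
            super().__init__(GoLexer, RagelEmbeddedLexer, **options)

        def analyse_text(text):
            return '@LANG: go' in text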
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pascal.py b/contrib/python/Pygments/py3/pygments/lexers/pascal.py
index 0d1ac3fdb7..f7397eb392 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pascal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pascal.py
@@ -4,7 +4,7 @@
Lexers for Pascal family languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -67,29 +67,29 @@ class DelphiLexer(Lexer):
'dispose', 'exit', 'false', 'new', 'true'
)
- BLOCK_KEYWORDS = {
+ BLOCK_KEYWORDS = {
'begin', 'class', 'const', 'constructor', 'destructor', 'end',
'finalization', 'function', 'implementation', 'initialization',
'label', 'library', 'operator', 'procedure', 'program', 'property',
'record', 'threadvar', 'type', 'unit', 'uses', 'var'
- }
+ }
- FUNCTION_MODIFIERS = {
+ FUNCTION_MODIFIERS = {
'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
'override', 'assembler'
- }
+ }
# XXX: those aren't global. but currently we know no way for defining
# them just for the type context.
- DIRECTIVES = {
+ DIRECTIVES = {
'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
'published', 'public'
- }
+ }
- BUILTIN_TYPES = {
+ BUILTIN_TYPES = {
'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
@@ -103,7 +103,7 @@ class DelphiLexer(Lexer):
'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
'widechar', 'widestring', 'word', 'wordbool'
- }
+ }
BUILTIN_UNITS = {
'System': (
@@ -245,7 +245,7 @@ class DelphiLexer(Lexer):
)
}
- ASM_REGISTERS = {
+ ASM_REGISTERS = {
'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
@@ -254,9 +254,9 @@ class DelphiLexer(Lexer):
'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
'xmm6', 'xmm7'
- }
+ }
- ASM_INSTRUCTIONS = {
+ ASM_INSTRUCTIONS = {
'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
@@ -295,7 +295,7 @@ class DelphiLexer(Lexer):
'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
'xlatb', 'xor'
- }
+ }
def __init__(self, **options):
Lexer.__init__(self, **options)
@@ -562,9 +562,9 @@ class AdaLexer(RegexLexer):
'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
- 'return', 'reverse', 'select', 'separate', 'some', 'subtype',
- 'synchronized', 'task', 'tagged', 'terminate', 'then', 'type', 'until',
- 'when', 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
+ 'return', 'reverse', 'select', 'separate', 'some', 'subtype',
+ 'synchronized', 'task', 'tagged', 'terminate', 'then', 'type', 'until',
+ 'when', 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
(r'"[^"]*"', String),
include('attribute'),
@@ -576,7 +576,7 @@ class AdaLexer(RegexLexer):
(r'\n+', Text),
],
'numbers': [
- (r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex),
+ (r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex),
(r'[0-9_]+\.[0-9_]*', Number.Float),
(r'[0-9_]+', Number.Integer),
],
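The Ada hunk restores the based-literal rule [0-9_]+#[0-9a-f_\.]+#, the Number.Hex rule for Ada's base#digits# notation (16#FF#, 2#1010_1010#, and based reals such as 16#F.FF#). A tiny regex check of what the pattern accepts; compiling it with re.IGNORECASE mirrors the case-insensitive flags the Ada lexer is assumed to use:

    import re

    based_literal = re.compile(r'[0-9_]+#[0-9a-f_\.]+#', re.IGNORECASE)
    for sample in ('16#FF#', '2#1010_1010#', '16#F.FF#', '10#42#'):
        print(sample, bool(based_literal.fullmatch(sample)))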
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pawn.py b/contrib/python/Pygments/py3/pygments/lexers/pawn.py
index 5a303e4949..7bfbdbdfb4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pawn.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pawn.py
@@ -4,13 +4,13 @@
Lexers for the Pawn languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation
from pygments.util import get_bool_opt
__all__ = ['SourcePawnLexer', 'PawnLexer']
@@ -84,25 +84,25 @@ class SourcePawnLexer(RegexLexer):
]
}
- SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
- 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
- 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
- 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
- 'ConVarBounds', 'QueryCookie', 'ReplySource',
- 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
- 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
- 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
- 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
- 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
- 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
- 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
- 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
- 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
- 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
- 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
- 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
- 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
- 'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
+ SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
+ 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
+ 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
+ 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
+ 'ConVarBounds', 'QueryCookie', 'ReplySource',
+ 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
+ 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
+ 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
+ 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
+ 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
+ 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
+ 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
+ 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
+ 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
+ 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
+ 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
+ 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
+ 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
+ 'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
def __init__(self, **options):
self.smhighlighting = get_bool_opt(options,
@@ -194,9 +194,9 @@ class PawnLexer(RegexLexer):
(r'.*?\n', Comment),
]
}
-
- def analyse_text(text):
- """This is basically C. There is a keyword which doesn't exist in C
- though and is nearly unique to this language."""
- if 'tagof' in text:
- return 0.01
+
+ def analyse_text(text):
+ """This is basically C. There is a keyword which doesn't exist in C
+ though and is nearly unique to this language."""
+ if 'tagof' in text:
+ return 0.01
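The PawnLexer hunk restores an analyse_text that keys on the 'tagof' keyword, which is valid Pawn but not C (SourcePawnLexer's get_bool_opt call is truncated above, so its option name is left alone here). A short sketch of the resulting scores; a falsy return is normalized to 0.0 by the analyse_text wrapper that Pygments installs on every lexer:

    from pygments.lexers.pawn import PawnLexer

    print(PawnLexer.analyse_text("new tag = tagof(Float:x);"))   # 0.01: 'tagof' is present
    print(PawnLexer.analyse_text("int main() { return 0; }"))    # no 'tagof', normalized to 0.0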
diff --git a/contrib/python/Pygments/py3/pygments/lexers/perl.py b/contrib/python/Pygments/py3/pygments/lexers/perl.py
index bac325bb45..13fc6867f1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/perl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/perl.py
@@ -2,9 +2,9 @@
pygments.lexers.perl
~~~~~~~~~~~~~~~~~~~~
- Lexers for Perl, Raku and related languages.
+ Lexers for Perl, Raku and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,12 +21,12 @@ __all__ = ['PerlLexer', 'Perl6Lexer']
class PerlLexer(RegexLexer):
"""
- For `Perl <https://www.perl.org>`_ source code.
+ For `Perl <https://www.perl.org>`_ source code.
"""
name = 'Perl'
aliases = ['perl', 'pl']
- filenames = ['*.pl', '*.pm', '*.t', '*.perl']
+ filenames = ['*.pl', '*.pm', '*.t', '*.perl']
mimetypes = ['text/x-perl', 'application/x-perl']
flags = re.DOTALL | re.MULTILINE
@@ -207,205 +207,205 @@ class PerlLexer(RegexLexer):
def analyse_text(text):
if shebang_matches(text, r'perl'):
return True
-
- result = 0
-
+
+ result = 0
+
if re.search(r'(?:my|our)\s+[$@%(]', text):
- result += 0.9
-
- if ':=' in text:
- # := is not valid Perl, but it appears in unicon, so we should
- # become less confident if we think we found Perl with :=
- result /= 2
-
- return result
+ result += 0.9
+ if ':=' in text:
+ # := is not valid Perl, but it appears in unicon, so we should
+ # become less confident if we think we found Perl with :=
+ result /= 2
+ return result
+
+
class Perl6Lexer(ExtendedRegexLexer):
"""
- For `Raku <https://www.raku.org>`_ (a.k.a. Perl 6) source code.
+ For `Raku <https://www.raku.org>`_ (a.k.a. Perl 6) source code.
.. versionadded:: 2.0
"""
name = 'Perl6'
- aliases = ['perl6', 'pl6', 'raku']
+ aliases = ['perl6', 'pl6', 'raku']
filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
- '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
- '*.rakutest', '*.rakudoc']
+ '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
+ '*.rakutest', '*.rakudoc']
mimetypes = ['text/x-perl6', 'application/x-perl6']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
PERL6_IDENTIFIER_RANGE = r"['\w:-]"
PERL6_KEYWORDS = (
- #Phasers
- 'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST',
- 'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO',
- #Keywords
- 'anon','augment','but','class','constant','default','does','else',
- 'elsif','enum','for','gather','given','grammar','has','if','import',
- 'is','let','loop','made','make','method','module','multi','my','need',
- 'orwith','our','proceed','proto','repeat','require','return',
- 'return-rw','returns','role','rule','state','sub','submethod','subset',
- 'succeed','supersede','token','try','unit','unless','until','use',
- 'when','while','with','without',
- #Traits
- 'export','native','repr','required','rw','symbol',
+ #Phasers
+ 'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST',
+ 'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO',
+ #Keywords
+ 'anon','augment','but','class','constant','default','does','else',
+ 'elsif','enum','for','gather','given','grammar','has','if','import',
+ 'is','let','loop','made','make','method','module','multi','my','need',
+ 'orwith','our','proceed','proto','repeat','require','return',
+ 'return-rw','returns','role','rule','state','sub','submethod','subset',
+ 'succeed','supersede','token','try','unit','unless','until','use',
+ 'when','while','with','without',
+ #Traits
+ 'export','native','repr','required','rw','symbol',
)
PERL6_BUILTINS = (
- 'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos',
- 'acosec','acosech','acosh','acotan','acotanh','acquire','act','action',
- 'actions','add','add_attribute','add_enum_value','add_fallback',
- 'add_method','add_parent','add_private_method','add_role','add_trustee',
- 'adverb','after','all','allocate','allof','allowed','alternative-names',
- 'annotations','antipair','antipairs','any','anyof','app_lifetime',
- 'append','arch','archname','args','arity','Array','asec','asech','asin',
- 'asinh','ASSIGN-KEY','ASSIGN-POS','assuming','ast','at','atan','atan2',
- 'atanh','AT-KEY','atomic-assign','atomic-dec-fetch','atomic-fetch',
- 'atomic-fetch-add','atomic-fetch-dec','atomic-fetch-inc',
- 'atomic-fetch-sub','atomic-inc-fetch','AT-POS','attributes','auth',
- 'await','backtrace','Bag','BagHash','bail-out','base','basename',
- 'base-repeating','batch','BIND-KEY','BIND-POS','bind-stderr',
- 'bind-stdin','bind-stdout','bind-udp','bits','bless','block','Bool',
- 'bool-only','bounds','break','Bridge','broken','BUILD','build-date',
- 'bytes','cache','callframe','calling-package','CALL-ME','callsame',
- 'callwith','can','cancel','candidates','cando','can-ok','canonpath',
- 'caps','caption','Capture','cas','catdir','categorize','categorize-list',
- 'catfile','catpath','cause','ceiling','cglobal','changed','Channel',
- 'chars','chdir','child','child-name','child-typename','chmod','chomp',
- 'chop','chr','chrs','chunks','cis','classify','classify-list','cleanup',
- 'clone','close','closed','close-stdin','cmp-ok','code','codes','collate',
- 'column','comb','combinations','command','comment','compiler','Complex',
- 'compose','compose_type','composer','condition','config',
- 'configure_destroy','configure_type_checking','conj','connect',
- 'constraints','construct','contains','contents','copy','cos','cosec',
- 'cosech','cosh','cotan','cotanh','count','count-only','cpu-cores',
- 'cpu-usage','CREATE','create_type','cross','cue','curdir','curupdir','d',
- 'Date','DateTime','day','daycount','day-of-month','day-of-week',
- 'day-of-year','days-in-month','declaration','decode','decoder','deepmap',
- 'default','defined','DEFINITE','delayed','DELETE-KEY','DELETE-POS',
- 'denominator','desc','DESTROY','destroyers','devnull','diag',
- 'did-you-mean','die','dies-ok','dir','dirname','dir-sep','DISTROnames',
- 'do','does','does-ok','done','done-testing','duckmap','dynamic','e',
- 'eager','earlier','elems','emit','enclosing','encode','encoder',
- 'encoding','end','ends-with','enum_from_value','enum_value_list',
- 'enum_values','enums','eof','EVAL','eval-dies-ok','EVALFILE',
- 'eval-lives-ok','exception','excludes-max','excludes-min','EXISTS-KEY',
- 'EXISTS-POS','exit','exitcode','exp','expected','explicitly-manage',
- 'expmod','extension','f','fail','fails-like','fc','feature','file',
- 'filename','find_method','find_method_qualified','finish','first','flat',
- 'flatmap','flip','floor','flunk','flush','fmt','format','formatter',
- 'freeze','from','from-list','from-loop','from-posix','full',
- 'full-barrier','get','get_value','getc','gist','got','grab','grabpairs',
- 'grep','handle','handled','handles','hardware','has_accessor','Hash',
- 'head','headers','hh-mm-ss','hidden','hides','hour','how','hyper','id',
- 'illegal','im','in','indent','index','indices','indir','infinite',
- 'infix','infix:<+>','infix:<->','install_method_cache','Instant',
- 'instead','Int','int-bounds','interval','in-timezone','invalid-str',
- 'invert','invocant','IO','IO::Notification.watch-path','is_trusted',
- 'is_type','isa','is-absolute','isa-ok','is-approx','is-deeply',
- 'is-hidden','is-initial-thread','is-int','is-lazy','is-leap-year',
- 'isNaN','isnt','is-prime','is-relative','is-routine','is-setting',
- 'is-win','item','iterator','join','keep','kept','KERNELnames','key',
- 'keyof','keys','kill','kv','kxxv','l','lang','last','lastcall','later',
- 'lazy','lc','leading','level','like','line','lines','link','List',
- 'listen','live','lives-ok','local','lock','log','log10','lookup','lsb',
- 'made','MAIN','make','Map','match','max','maxpairs','merge','message',
- 'method','method_table','methods','migrate','min','minmax','minpairs',
- 'minute','misplaced','Mix','MixHash','mkdir','mode','modified','month',
- 'move','mro','msb','multi','multiness','my','name','named','named_names',
- 'narrow','nativecast','native-descriptor','nativesizeof','new','new_type',
- 'new-from-daycount','new-from-pairs','next','nextcallee','next-handle',
- 'nextsame','nextwith','NFC','NFD','NFKC','NFKD','nl-in','nl-out',
- 'nodemap','nok','none','norm','not','note','now','nude','Num',
- 'numerator','Numeric','of','offset','offset-in-hours','offset-in-minutes',
- 'ok','old','on-close','one','on-switch','open','opened','operation',
- 'optional','ord','ords','orig','os-error','osname','out-buffer','pack',
- 'package','package-kind','package-name','packages','pair','pairs',
- 'pairup','parameter','params','parent','parent-name','parents','parse',
- 'parse-base','parsefile','parse-names','parts','pass','path','path-sep',
- 'payload','peer-host','peer-port','periods','perl','permutations','phaser',
- 'pick','pickpairs','pid','placeholder','plan','plus','polar','poll',
- 'polymod','pop','pos','positional','posix','postfix','postmatch',
- 'precomp-ext','precomp-target','pred','prefix','prematch','prepend',
- 'print','printf','print-nl','print-to','private','private_method_table',
- 'proc','produce','Promise','prompt','protect','pull-one','push',
- 'push-all','push-at-least','push-exactly','push-until-lazy','put',
- 'qualifier-type','quit','r','race','radix','rand','range','Rat','raw',
- 're','read','readchars','readonly','ready','Real','reallocate','reals',
- 'reason','rebless','receive','recv','redispatcher','redo','reduce',
- 'rel2abs','relative','release','rename','repeated','replacement',
- 'report','reserved','resolve','restore','result','resume','rethrow',
- 'reverse','right','rindex','rmdir','role','roles_to_compose','rolish',
- 'roll','rootdir','roots','rotate','rotor','round','roundrobin',
- 'routine-type','run','rwx','s','samecase','samemark','samewith','say',
- 'schedule-on','scheduler','scope','sec','sech','second','seek','self',
- 'send','Set','set_hidden','set_name','set_package','set_rw','set_value',
- 'SetHash','set-instruments','setup_finalization','shape','share','shell',
- 'shift','sibling','sigil','sign','signal','signals','signature','sin',
- 'sinh','sink','sink-all','skip','skip-at-least','skip-at-least-pull-one',
- 'skip-one','skip-rest','sleep','sleep-timer','sleep-until','Slip','slurp',
- 'slurp-rest','slurpy','snap','snapper','so','socket-host','socket-port',
- 'sort','source','source-package','spawn','SPEC','splice','split',
- 'splitdir','splitpath','sprintf','spurt','sqrt','squish','srand','stable',
- 'start','started','starts-with','status','stderr','stdout','Str',
- 'sub_signature','subbuf','subbuf-rw','subname','subparse','subst',
- 'subst-mutate','substr','substr-eq','substr-rw','subtest','succ','sum',
- 'Supply','symlink','t','tail','take','take-rw','tan','tanh','tap',
- 'target','target-name','tc','tclc','tell','then','throttle','throw',
- 'throws-like','timezone','tmpdir','to','today','todo','toggle','to-posix',
- 'total','trailing','trans','tree','trim','trim-leading','trim-trailing',
- 'truncate','truncated-to','trusts','try_acquire','trying','twigil','type',
- 'type_captures','typename','uc','udp','uncaught_handler','unimatch',
- 'uniname','uninames','uniparse','uniprop','uniprops','unique','unival',
- 'univals','unlike','unlink','unlock','unpack','unpolar','unshift',
- 'unwrap','updir','USAGE','use-ok','utc','val','value','values','VAR',
- 'variable','verbose-config','version','VMnames','volume','vow','w','wait',
- 'warn','watch','watch-path','week','weekday-of-month','week-number',
- 'week-year','WHAT','when','WHERE','WHEREFORE','WHICH','WHO',
- 'whole-second','WHY','wordcase','words','workaround','wrap','write',
- 'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest',
-
+ 'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos',
+ 'acosec','acosech','acosh','acotan','acotanh','acquire','act','action',
+ 'actions','add','add_attribute','add_enum_value','add_fallback',
+ 'add_method','add_parent','add_private_method','add_role','add_trustee',
+ 'adverb','after','all','allocate','allof','allowed','alternative-names',
+ 'annotations','antipair','antipairs','any','anyof','app_lifetime',
+ 'append','arch','archname','args','arity','Array','asec','asech','asin',
+ 'asinh','ASSIGN-KEY','ASSIGN-POS','assuming','ast','at','atan','atan2',
+ 'atanh','AT-KEY','atomic-assign','atomic-dec-fetch','atomic-fetch',
+ 'atomic-fetch-add','atomic-fetch-dec','atomic-fetch-inc',
+ 'atomic-fetch-sub','atomic-inc-fetch','AT-POS','attributes','auth',
+ 'await','backtrace','Bag','BagHash','bail-out','base','basename',
+ 'base-repeating','batch','BIND-KEY','BIND-POS','bind-stderr',
+ 'bind-stdin','bind-stdout','bind-udp','bits','bless','block','Bool',
+ 'bool-only','bounds','break','Bridge','broken','BUILD','build-date',
+ 'bytes','cache','callframe','calling-package','CALL-ME','callsame',
+ 'callwith','can','cancel','candidates','cando','can-ok','canonpath',
+ 'caps','caption','Capture','cas','catdir','categorize','categorize-list',
+ 'catfile','catpath','cause','ceiling','cglobal','changed','Channel',
+ 'chars','chdir','child','child-name','child-typename','chmod','chomp',
+ 'chop','chr','chrs','chunks','cis','classify','classify-list','cleanup',
+ 'clone','close','closed','close-stdin','cmp-ok','code','codes','collate',
+ 'column','comb','combinations','command','comment','compiler','Complex',
+ 'compose','compose_type','composer','condition','config',
+ 'configure_destroy','configure_type_checking','conj','connect',
+ 'constraints','construct','contains','contents','copy','cos','cosec',
+ 'cosech','cosh','cotan','cotanh','count','count-only','cpu-cores',
+ 'cpu-usage','CREATE','create_type','cross','cue','curdir','curupdir','d',
+ 'Date','DateTime','day','daycount','day-of-month','day-of-week',
+ 'day-of-year','days-in-month','declaration','decode','decoder','deepmap',
+ 'default','defined','DEFINITE','delayed','DELETE-KEY','DELETE-POS',
+ 'denominator','desc','DESTROY','destroyers','devnull','diag',
+ 'did-you-mean','die','dies-ok','dir','dirname','dir-sep','DISTROnames',
+ 'do','does','does-ok','done','done-testing','duckmap','dynamic','e',
+ 'eager','earlier','elems','emit','enclosing','encode','encoder',
+ 'encoding','end','ends-with','enum_from_value','enum_value_list',
+ 'enum_values','enums','eof','EVAL','eval-dies-ok','EVALFILE',
+ 'eval-lives-ok','exception','excludes-max','excludes-min','EXISTS-KEY',
+ 'EXISTS-POS','exit','exitcode','exp','expected','explicitly-manage',
+ 'expmod','extension','f','fail','fails-like','fc','feature','file',
+ 'filename','find_method','find_method_qualified','finish','first','flat',
+ 'flatmap','flip','floor','flunk','flush','fmt','format','formatter',
+ 'freeze','from','from-list','from-loop','from-posix','full',
+ 'full-barrier','get','get_value','getc','gist','got','grab','grabpairs',
+ 'grep','handle','handled','handles','hardware','has_accessor','Hash',
+ 'head','headers','hh-mm-ss','hidden','hides','hour','how','hyper','id',
+ 'illegal','im','in','indent','index','indices','indir','infinite',
+ 'infix','infix:<+>','infix:<->','install_method_cache','Instant',
+ 'instead','Int','int-bounds','interval','in-timezone','invalid-str',
+ 'invert','invocant','IO','IO::Notification.watch-path','is_trusted',
+ 'is_type','isa','is-absolute','isa-ok','is-approx','is-deeply',
+ 'is-hidden','is-initial-thread','is-int','is-lazy','is-leap-year',
+ 'isNaN','isnt','is-prime','is-relative','is-routine','is-setting',
+ 'is-win','item','iterator','join','keep','kept','KERNELnames','key',
+ 'keyof','keys','kill','kv','kxxv','l','lang','last','lastcall','later',
+ 'lazy','lc','leading','level','like','line','lines','link','List',
+ 'listen','live','lives-ok','local','lock','log','log10','lookup','lsb',
+ 'made','MAIN','make','Map','match','max','maxpairs','merge','message',
+ 'method','method_table','methods','migrate','min','minmax','minpairs',
+ 'minute','misplaced','Mix','MixHash','mkdir','mode','modified','month',
+ 'move','mro','msb','multi','multiness','my','name','named','named_names',
+ 'narrow','nativecast','native-descriptor','nativesizeof','new','new_type',
+ 'new-from-daycount','new-from-pairs','next','nextcallee','next-handle',
+ 'nextsame','nextwith','NFC','NFD','NFKC','NFKD','nl-in','nl-out',
+ 'nodemap','nok','none','norm','not','note','now','nude','Num',
+ 'numerator','Numeric','of','offset','offset-in-hours','offset-in-minutes',
+ 'ok','old','on-close','one','on-switch','open','opened','operation',
+ 'optional','ord','ords','orig','os-error','osname','out-buffer','pack',
+ 'package','package-kind','package-name','packages','pair','pairs',
+ 'pairup','parameter','params','parent','parent-name','parents','parse',
+ 'parse-base','parsefile','parse-names','parts','pass','path','path-sep',
+ 'payload','peer-host','peer-port','periods','perl','permutations','phaser',
+ 'pick','pickpairs','pid','placeholder','plan','plus','polar','poll',
+ 'polymod','pop','pos','positional','posix','postfix','postmatch',
+ 'precomp-ext','precomp-target','pred','prefix','prematch','prepend',
+ 'print','printf','print-nl','print-to','private','private_method_table',
+ 'proc','produce','Promise','prompt','protect','pull-one','push',
+ 'push-all','push-at-least','push-exactly','push-until-lazy','put',
+ 'qualifier-type','quit','r','race','radix','rand','range','Rat','raw',
+ 're','read','readchars','readonly','ready','Real','reallocate','reals',
+ 'reason','rebless','receive','recv','redispatcher','redo','reduce',
+ 'rel2abs','relative','release','rename','repeated','replacement',
+ 'report','reserved','resolve','restore','result','resume','rethrow',
+ 'reverse','right','rindex','rmdir','role','roles_to_compose','rolish',
+ 'roll','rootdir','roots','rotate','rotor','round','roundrobin',
+ 'routine-type','run','rwx','s','samecase','samemark','samewith','say',
+ 'schedule-on','scheduler','scope','sec','sech','second','seek','self',
+ 'send','Set','set_hidden','set_name','set_package','set_rw','set_value',
+ 'SetHash','set-instruments','setup_finalization','shape','share','shell',
+ 'shift','sibling','sigil','sign','signal','signals','signature','sin',
+ 'sinh','sink','sink-all','skip','skip-at-least','skip-at-least-pull-one',
+ 'skip-one','skip-rest','sleep','sleep-timer','sleep-until','Slip','slurp',
+ 'slurp-rest','slurpy','snap','snapper','so','socket-host','socket-port',
+ 'sort','source','source-package','spawn','SPEC','splice','split',
+ 'splitdir','splitpath','sprintf','spurt','sqrt','squish','srand','stable',
+ 'start','started','starts-with','status','stderr','stdout','Str',
+ 'sub_signature','subbuf','subbuf-rw','subname','subparse','subst',
+ 'subst-mutate','substr','substr-eq','substr-rw','subtest','succ','sum',
+ 'Supply','symlink','t','tail','take','take-rw','tan','tanh','tap',
+ 'target','target-name','tc','tclc','tell','then','throttle','throw',
+ 'throws-like','timezone','tmpdir','to','today','todo','toggle','to-posix',
+ 'total','trailing','trans','tree','trim','trim-leading','trim-trailing',
+ 'truncate','truncated-to','trusts','try_acquire','trying','twigil','type',
+ 'type_captures','typename','uc','udp','uncaught_handler','unimatch',
+ 'uniname','uninames','uniparse','uniprop','uniprops','unique','unival',
+ 'univals','unlike','unlink','unlock','unpack','unpolar','unshift',
+ 'unwrap','updir','USAGE','use-ok','utc','val','value','values','VAR',
+ 'variable','verbose-config','version','VMnames','volume','vow','w','wait',
+ 'warn','watch','watch-path','week','weekday-of-month','week-number',
+ 'week-year','WHAT','when','WHERE','WHEREFORE','WHICH','WHO',
+ 'whole-second','WHY','wordcase','words','workaround','wrap','write',
+ 'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest',
+
)
PERL6_BUILTIN_CLASSES = (
- #Booleans
- 'False','True',
- #Classes
- 'Any','Array','Associative','AST','atomicint','Attribute','Backtrace',
- 'Backtrace::Frame','Bag','Baggy','BagHash','Blob','Block','Bool','Buf',
- 'Callable','CallFrame','Cancellation','Capture','CArray','Channel','Code',
- 'compiler','Complex','ComplexStr','Cool','CurrentThreadScheduler',
- 'Cursor','Date','Dateish','DateTime','Distro','Duration','Encoding',
- 'Exception','Failure','FatRat','Grammar','Hash','HyperWhatever','Instant',
- 'Int','int16','int32','int64','int8','IntStr','IO','IO::ArgFiles',
- 'IO::CatHandle','IO::Handle','IO::Notification','IO::Path',
- 'IO::Path::Cygwin','IO::Path::QNX','IO::Path::Unix','IO::Path::Win32',
- 'IO::Pipe','IO::Socket','IO::Socket::Async','IO::Socket::INET','IO::Spec',
- 'IO::Spec::Cygwin','IO::Spec::QNX','IO::Spec::Unix','IO::Spec::Win32',
- 'IO::Special','Iterable','Iterator','Junction','Kernel','Label','List',
- 'Lock','Lock::Async','long','longlong','Macro','Map','Match',
- 'Metamodel::AttributeContainer','Metamodel::C3MRO','Metamodel::ClassHOW',
- 'Metamodel::EnumHOW','Metamodel::Finalization','Metamodel::MethodContainer',
- 'Metamodel::MROBasedMethodDispatch','Metamodel::MultipleInheritance',
- 'Metamodel::Naming','Metamodel::Primitives','Metamodel::PrivateMethodContainer',
- 'Metamodel::RoleContainer','Metamodel::Trusting','Method','Mix','MixHash',
- 'Mixy','Mu','NFC','NFD','NFKC','NFKD','Nil','Num','num32','num64',
- 'Numeric','NumStr','ObjAt','Order','Pair','Parameter','Perl','Pod::Block',
- 'Pod::Block::Code','Pod::Block::Comment','Pod::Block::Declarator',
- 'Pod::Block::Named','Pod::Block::Para','Pod::Block::Table','Pod::Heading',
- 'Pod::Item','Pointer','Positional','PositionalBindFailover','Proc',
- 'Proc::Async','Promise','Proxy','PseudoStash','QuantHash','Range','Rat',
- 'Rational','RatStr','Real','Regex','Routine','Scalar','Scheduler',
- 'Semaphore','Seq','Set','SetHash','Setty','Signature','size_t','Slip',
- 'Stash','Str','StrDistance','Stringy','Sub','Submethod','Supplier',
- 'Supplier::Preserving','Supply','Systemic','Tap','Telemetry',
- 'Telemetry::Instrument::Thread','Telemetry::Instrument::Usage',
- 'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler',
- 'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable',
- 'Version','VM','Whatever','WhateverCode','WrapHandle'
+ #Booleans
+ 'False','True',
+ #Classes
+ 'Any','Array','Associative','AST','atomicint','Attribute','Backtrace',
+ 'Backtrace::Frame','Bag','Baggy','BagHash','Blob','Block','Bool','Buf',
+ 'Callable','CallFrame','Cancellation','Capture','CArray','Channel','Code',
+ 'compiler','Complex','ComplexStr','Cool','CurrentThreadScheduler',
+ 'Cursor','Date','Dateish','DateTime','Distro','Duration','Encoding',
+ 'Exception','Failure','FatRat','Grammar','Hash','HyperWhatever','Instant',
+ 'Int','int16','int32','int64','int8','IntStr','IO','IO::ArgFiles',
+ 'IO::CatHandle','IO::Handle','IO::Notification','IO::Path',
+ 'IO::Path::Cygwin','IO::Path::QNX','IO::Path::Unix','IO::Path::Win32',
+ 'IO::Pipe','IO::Socket','IO::Socket::Async','IO::Socket::INET','IO::Spec',
+ 'IO::Spec::Cygwin','IO::Spec::QNX','IO::Spec::Unix','IO::Spec::Win32',
+ 'IO::Special','Iterable','Iterator','Junction','Kernel','Label','List',
+ 'Lock','Lock::Async','long','longlong','Macro','Map','Match',
+ 'Metamodel::AttributeContainer','Metamodel::C3MRO','Metamodel::ClassHOW',
+ 'Metamodel::EnumHOW','Metamodel::Finalization','Metamodel::MethodContainer',
+ 'Metamodel::MROBasedMethodDispatch','Metamodel::MultipleInheritance',
+ 'Metamodel::Naming','Metamodel::Primitives','Metamodel::PrivateMethodContainer',
+ 'Metamodel::RoleContainer','Metamodel::Trusting','Method','Mix','MixHash',
+ 'Mixy','Mu','NFC','NFD','NFKC','NFKD','Nil','Num','num32','num64',
+ 'Numeric','NumStr','ObjAt','Order','Pair','Parameter','Perl','Pod::Block',
+ 'Pod::Block::Code','Pod::Block::Comment','Pod::Block::Declarator',
+ 'Pod::Block::Named','Pod::Block::Para','Pod::Block::Table','Pod::Heading',
+ 'Pod::Item','Pointer','Positional','PositionalBindFailover','Proc',
+ 'Proc::Async','Promise','Proxy','PseudoStash','QuantHash','Range','Rat',
+ 'Rational','RatStr','Real','Regex','Routine','Scalar','Scheduler',
+ 'Semaphore','Seq','Set','SetHash','Setty','Signature','size_t','Slip',
+ 'Stash','Str','StrDistance','Stringy','Sub','Submethod','Supplier',
+ 'Supplier::Preserving','Supply','Systemic','Tap','Telemetry',
+ 'Telemetry::Instrument::Thread','Telemetry::Instrument::Usage',
+ 'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler',
+ 'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable',
+ 'Version','VM','Whatever','WhateverCode','WrapHandle'
)
PERL6_OPERATORS = (
@@ -418,76 +418,76 @@ class Perl6Lexer(ExtendedRegexLexer):
'~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
'!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
'&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
- 'not', '<==', '==>', '<<==', '==>>','unicmp',
+ 'not', '<==', '==>', '<<==', '==>>','unicmp',
)
# Perl 6 has a *lot* of possible bracketing characters
# this list was lifted from STD.pm6 (https://github.com/perl6/std)
PERL6_BRACKETS = {
- '\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d',
- '\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b',
- '\u0f3c': '\u0f3d', '\u169b': '\u169c', '\u2018': '\u2019',
- '\u201a': '\u2019', '\u201b': '\u2019', '\u201c': '\u201d',
- '\u201e': '\u201d', '\u201f': '\u201d', '\u2039': '\u203a',
- '\u2045': '\u2046', '\u207d': '\u207e', '\u208d': '\u208e',
- '\u2208': '\u220b', '\u2209': '\u220c', '\u220a': '\u220d',
- '\u2215': '\u29f5', '\u223c': '\u223d', '\u2243': '\u22cd',
- '\u2252': '\u2253', '\u2254': '\u2255', '\u2264': '\u2265',
- '\u2266': '\u2267', '\u2268': '\u2269', '\u226a': '\u226b',
- '\u226e': '\u226f', '\u2270': '\u2271', '\u2272': '\u2273',
- '\u2274': '\u2275', '\u2276': '\u2277', '\u2278': '\u2279',
- '\u227a': '\u227b', '\u227c': '\u227d', '\u227e': '\u227f',
- '\u2280': '\u2281', '\u2282': '\u2283', '\u2284': '\u2285',
- '\u2286': '\u2287', '\u2288': '\u2289', '\u228a': '\u228b',
- '\u228f': '\u2290', '\u2291': '\u2292', '\u2298': '\u29b8',
- '\u22a2': '\u22a3', '\u22a6': '\u2ade', '\u22a8': '\u2ae4',
- '\u22a9': '\u2ae3', '\u22ab': '\u2ae5', '\u22b0': '\u22b1',
- '\u22b2': '\u22b3', '\u22b4': '\u22b5', '\u22b6': '\u22b7',
- '\u22c9': '\u22ca', '\u22cb': '\u22cc', '\u22d0': '\u22d1',
- '\u22d6': '\u22d7', '\u22d8': '\u22d9', '\u22da': '\u22db',
- '\u22dc': '\u22dd', '\u22de': '\u22df', '\u22e0': '\u22e1',
- '\u22e2': '\u22e3', '\u22e4': '\u22e5', '\u22e6': '\u22e7',
- '\u22e8': '\u22e9', '\u22ea': '\u22eb', '\u22ec': '\u22ed',
- '\u22f0': '\u22f1', '\u22f2': '\u22fa', '\u22f3': '\u22fb',
- '\u22f4': '\u22fc', '\u22f6': '\u22fd', '\u22f7': '\u22fe',
- '\u2308': '\u2309', '\u230a': '\u230b', '\u2329': '\u232a',
- '\u23b4': '\u23b5', '\u2768': '\u2769', '\u276a': '\u276b',
- '\u276c': '\u276d', '\u276e': '\u276f', '\u2770': '\u2771',
- '\u2772': '\u2773', '\u2774': '\u2775', '\u27c3': '\u27c4',
- '\u27c5': '\u27c6', '\u27d5': '\u27d6', '\u27dd': '\u27de',
- '\u27e2': '\u27e3', '\u27e4': '\u27e5', '\u27e6': '\u27e7',
- '\u27e8': '\u27e9', '\u27ea': '\u27eb', '\u2983': '\u2984',
- '\u2985': '\u2986', '\u2987': '\u2988', '\u2989': '\u298a',
- '\u298b': '\u298c', '\u298d': '\u298e', '\u298f': '\u2990',
- '\u2991': '\u2992', '\u2993': '\u2994', '\u2995': '\u2996',
- '\u2997': '\u2998', '\u29c0': '\u29c1', '\u29c4': '\u29c5',
- '\u29cf': '\u29d0', '\u29d1': '\u29d2', '\u29d4': '\u29d5',
- '\u29d8': '\u29d9', '\u29da': '\u29db', '\u29f8': '\u29f9',
- '\u29fc': '\u29fd', '\u2a2b': '\u2a2c', '\u2a2d': '\u2a2e',
- '\u2a34': '\u2a35', '\u2a3c': '\u2a3d', '\u2a64': '\u2a65',
- '\u2a79': '\u2a7a', '\u2a7d': '\u2a7e', '\u2a7f': '\u2a80',
- '\u2a81': '\u2a82', '\u2a83': '\u2a84', '\u2a8b': '\u2a8c',
- '\u2a91': '\u2a92', '\u2a93': '\u2a94', '\u2a95': '\u2a96',
- '\u2a97': '\u2a98', '\u2a99': '\u2a9a', '\u2a9b': '\u2a9c',
- '\u2aa1': '\u2aa2', '\u2aa6': '\u2aa7', '\u2aa8': '\u2aa9',
- '\u2aaa': '\u2aab', '\u2aac': '\u2aad', '\u2aaf': '\u2ab0',
- '\u2ab3': '\u2ab4', '\u2abb': '\u2abc', '\u2abd': '\u2abe',
- '\u2abf': '\u2ac0', '\u2ac1': '\u2ac2', '\u2ac3': '\u2ac4',
- '\u2ac5': '\u2ac6', '\u2acd': '\u2ace', '\u2acf': '\u2ad0',
- '\u2ad1': '\u2ad2', '\u2ad3': '\u2ad4', '\u2ad5': '\u2ad6',
- '\u2aec': '\u2aed', '\u2af7': '\u2af8', '\u2af9': '\u2afa',
- '\u2e02': '\u2e03', '\u2e04': '\u2e05', '\u2e09': '\u2e0a',
- '\u2e0c': '\u2e0d', '\u2e1c': '\u2e1d', '\u2e20': '\u2e21',
- '\u3008': '\u3009', '\u300a': '\u300b', '\u300c': '\u300d',
- '\u300e': '\u300f', '\u3010': '\u3011', '\u3014': '\u3015',
- '\u3016': '\u3017', '\u3018': '\u3019', '\u301a': '\u301b',
- '\u301d': '\u301e', '\ufd3e': '\ufd3f', '\ufe17': '\ufe18',
- '\ufe35': '\ufe36', '\ufe37': '\ufe38', '\ufe39': '\ufe3a',
- '\ufe3b': '\ufe3c', '\ufe3d': '\ufe3e', '\ufe3f': '\ufe40',
- '\ufe41': '\ufe42', '\ufe43': '\ufe44', '\ufe47': '\ufe48',
- '\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e',
- '\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d',
- '\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63',
+ '\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d',
+ '\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b',
+ '\u0f3c': '\u0f3d', '\u169b': '\u169c', '\u2018': '\u2019',
+ '\u201a': '\u2019', '\u201b': '\u2019', '\u201c': '\u201d',
+ '\u201e': '\u201d', '\u201f': '\u201d', '\u2039': '\u203a',
+ '\u2045': '\u2046', '\u207d': '\u207e', '\u208d': '\u208e',
+ '\u2208': '\u220b', '\u2209': '\u220c', '\u220a': '\u220d',
+ '\u2215': '\u29f5', '\u223c': '\u223d', '\u2243': '\u22cd',
+ '\u2252': '\u2253', '\u2254': '\u2255', '\u2264': '\u2265',
+ '\u2266': '\u2267', '\u2268': '\u2269', '\u226a': '\u226b',
+ '\u226e': '\u226f', '\u2270': '\u2271', '\u2272': '\u2273',
+ '\u2274': '\u2275', '\u2276': '\u2277', '\u2278': '\u2279',
+ '\u227a': '\u227b', '\u227c': '\u227d', '\u227e': '\u227f',
+ '\u2280': '\u2281', '\u2282': '\u2283', '\u2284': '\u2285',
+ '\u2286': '\u2287', '\u2288': '\u2289', '\u228a': '\u228b',
+ '\u228f': '\u2290', '\u2291': '\u2292', '\u2298': '\u29b8',
+ '\u22a2': '\u22a3', '\u22a6': '\u2ade', '\u22a8': '\u2ae4',
+ '\u22a9': '\u2ae3', '\u22ab': '\u2ae5', '\u22b0': '\u22b1',
+ '\u22b2': '\u22b3', '\u22b4': '\u22b5', '\u22b6': '\u22b7',
+ '\u22c9': '\u22ca', '\u22cb': '\u22cc', '\u22d0': '\u22d1',
+ '\u22d6': '\u22d7', '\u22d8': '\u22d9', '\u22da': '\u22db',
+ '\u22dc': '\u22dd', '\u22de': '\u22df', '\u22e0': '\u22e1',
+ '\u22e2': '\u22e3', '\u22e4': '\u22e5', '\u22e6': '\u22e7',
+ '\u22e8': '\u22e9', '\u22ea': '\u22eb', '\u22ec': '\u22ed',
+ '\u22f0': '\u22f1', '\u22f2': '\u22fa', '\u22f3': '\u22fb',
+ '\u22f4': '\u22fc', '\u22f6': '\u22fd', '\u22f7': '\u22fe',
+ '\u2308': '\u2309', '\u230a': '\u230b', '\u2329': '\u232a',
+ '\u23b4': '\u23b5', '\u2768': '\u2769', '\u276a': '\u276b',
+ '\u276c': '\u276d', '\u276e': '\u276f', '\u2770': '\u2771',
+ '\u2772': '\u2773', '\u2774': '\u2775', '\u27c3': '\u27c4',
+ '\u27c5': '\u27c6', '\u27d5': '\u27d6', '\u27dd': '\u27de',
+ '\u27e2': '\u27e3', '\u27e4': '\u27e5', '\u27e6': '\u27e7',
+ '\u27e8': '\u27e9', '\u27ea': '\u27eb', '\u2983': '\u2984',
+ '\u2985': '\u2986', '\u2987': '\u2988', '\u2989': '\u298a',
+ '\u298b': '\u298c', '\u298d': '\u298e', '\u298f': '\u2990',
+ '\u2991': '\u2992', '\u2993': '\u2994', '\u2995': '\u2996',
+ '\u2997': '\u2998', '\u29c0': '\u29c1', '\u29c4': '\u29c5',
+ '\u29cf': '\u29d0', '\u29d1': '\u29d2', '\u29d4': '\u29d5',
+ '\u29d8': '\u29d9', '\u29da': '\u29db', '\u29f8': '\u29f9',
+ '\u29fc': '\u29fd', '\u2a2b': '\u2a2c', '\u2a2d': '\u2a2e',
+ '\u2a34': '\u2a35', '\u2a3c': '\u2a3d', '\u2a64': '\u2a65',
+ '\u2a79': '\u2a7a', '\u2a7d': '\u2a7e', '\u2a7f': '\u2a80',
+ '\u2a81': '\u2a82', '\u2a83': '\u2a84', '\u2a8b': '\u2a8c',
+ '\u2a91': '\u2a92', '\u2a93': '\u2a94', '\u2a95': '\u2a96',
+ '\u2a97': '\u2a98', '\u2a99': '\u2a9a', '\u2a9b': '\u2a9c',
+ '\u2aa1': '\u2aa2', '\u2aa6': '\u2aa7', '\u2aa8': '\u2aa9',
+ '\u2aaa': '\u2aab', '\u2aac': '\u2aad', '\u2aaf': '\u2ab0',
+ '\u2ab3': '\u2ab4', '\u2abb': '\u2abc', '\u2abd': '\u2abe',
+ '\u2abf': '\u2ac0', '\u2ac1': '\u2ac2', '\u2ac3': '\u2ac4',
+ '\u2ac5': '\u2ac6', '\u2acd': '\u2ace', '\u2acf': '\u2ad0',
+ '\u2ad1': '\u2ad2', '\u2ad3': '\u2ad4', '\u2ad5': '\u2ad6',
+ '\u2aec': '\u2aed', '\u2af7': '\u2af8', '\u2af9': '\u2afa',
+ '\u2e02': '\u2e03', '\u2e04': '\u2e05', '\u2e09': '\u2e0a',
+ '\u2e0c': '\u2e0d', '\u2e1c': '\u2e1d', '\u2e20': '\u2e21',
+ '\u3008': '\u3009', '\u300a': '\u300b', '\u300c': '\u300d',
+ '\u300e': '\u300f', '\u3010': '\u3011', '\u3014': '\u3015',
+ '\u3016': '\u3017', '\u3018': '\u3019', '\u301a': '\u301b',
+ '\u301d': '\u301e', '\ufd3e': '\ufd3f', '\ufe17': '\ufe18',
+ '\ufe35': '\ufe36', '\ufe37': '\ufe38', '\ufe39': '\ufe3a',
+ '\ufe3b': '\ufe3c', '\ufe3d': '\ufe3e', '\ufe3f': '\ufe40',
+ '\ufe41': '\ufe42', '\ufe43': '\ufe44', '\ufe47': '\ufe48',
+ '\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e',
+ '\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d',
+ '\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63',
}
def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
@@ -611,11 +611,11 @@ class Perl6Lexer(ExtendedRegexLexer):
Name.Builtin),
(_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
# copied from PerlLexer
- (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
+ (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
Name.Variable),
(r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
(r'::\?\w+', Name.Variable.Global),
- (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
+ (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
Name.Variable.Global),
(r'\$(?:<.*?>)+', Name.Variable),
(r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
@@ -720,12 +720,12 @@ class Perl6Lexer(ExtendedRegexLexer):
continue
break
- if ':=' in text:
- # Same logic as above for PerlLexer
- rating /= 2
-
+ if ':=' in text:
+ # Same logic as above for PerlLexer
+ rating /= 2
+
return rating
def __init__(self, **options):
- super().__init__(**options)
+ super().__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
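
For context: the Perl 6 hunks above only re-indent the builtin, class, operator and bracket tables; the lexer's behaviour is unchanged. A minimal sketch of how this lexer is normally driven through the public Pygments API follows; the sample source line is invented for illustration and is not part of this commit.

    # Illustrative only: highlight an invented Raku/Perl 6 snippet with the
    # lexer whose word tables are re-indented above.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import Perl6Lexer

    source = 'my @words = <foo bar baz>;\nsay @words.elems;\n'

    # PERL6_BUILTINS feeds the Name.Builtin rule (e.g. 'say', 'elems'), while
    # PERL6_BRACKETS lets the lexer pair up exotic quoting delimiters.
    print(highlight(source, Perl6Lexer(), TerminalFormatter()))
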
diff --git a/contrib/python/Pygments/py3/pygments/lexers/php.py b/contrib/python/Pygments/py3/pygments/lexers/php.py
index 3ba299ac0a..4f55b33e71 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/php.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/php.py
@@ -4,23 +4,23 @@
Lexers for PHP and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
- using, this, words, do_insertions
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
+ using, this, words, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other, Generic
-from pygments.util import get_bool_opt, get_list_opt, shebang_matches
+ Number, Punctuation, Other, Generic
+from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
-
-line_re = re.compile('.*?\n')
+__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
+line_re = re.compile('.*?\n')
+
class ZephirLexer(RegexLexer):
"""
For `Zephir language <http://zephir-lang.com/>`_ source code.
@@ -50,14 +50,14 @@ class ZephirLexer(RegexLexer):
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'/', Operator, '#pop'),
+ (r'/', Operator, '#pop'),
default('#pop')
],
'badregex': [
(r'\n', Text, '#pop')
],
'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
@@ -80,61 +80,61 @@ class ZephirLexer(RegexLexer):
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
-class PsyshConsoleLexer(Lexer):
- """
- For `PsySH`_ console output, such as:
-
- .. sourcecode:: psysh
-
- >>> $greeting = function($name): string {
- ... return "Hello, {$name}";
- ... };
- => Closure($name): string {#2371 …3}
- >>> $greeting('World')
- => "Hello, World"
-
- .. _PsySH: https://psysh.org/
- .. versionadded:: 2.7
- """
- name = 'PsySH console session for PHP'
- aliases = ['psysh']
-
- def __init__(self, **options):
- options['startinline'] = True
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- phplexer = PhpLexer(**self.options)
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>>> ') or line.startswith('... '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:4])]))
- curcode += line[4:]
- elif line.rstrip() == '...':
- insertions.append((len(curcode),
- [(0, Generic.Prompt, '...')]))
- curcode += line[3:]
- else:
- if curcode:
- yield from do_insertions(
- insertions, phplexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- yield from do_insertions(insertions,
- phplexer.get_tokens_unprocessed(curcode))
-
-
+class PsyshConsoleLexer(Lexer):
+ """
+ For `PsySH`_ console output, such as:
+
+ .. sourcecode:: psysh
+
+ >>> $greeting = function($name): string {
+ ... return "Hello, {$name}";
+ ... };
+ => Closure($name): string {#2371 …3}
+ >>> $greeting('World')
+ => "Hello, World"
+
+ .. _PsySH: https://psysh.org/
+ .. versionadded:: 2.7
+ """
+ name = 'PsySH console session for PHP'
+ aliases = ['psysh']
+
+ def __init__(self, **options):
+ options['startinline'] = True
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ phplexer = PhpLexer(**self.options)
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('>>> ') or line.startswith('... '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:4])]))
+ curcode += line[4:]
+ elif line.rstrip() == '...':
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, '...')]))
+ curcode += line[3:]
+ else:
+ if curcode:
+ yield from do_insertions(
+ insertions, phplexer.get_tokens_unprocessed(curcode))
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ yield from do_insertions(insertions,
+ phplexer.get_tokens_unprocessed(curcode))
+
+
class PhpLexer(RegexLexer):
"""
For `PHP <http://www.php.net/>`_ source code.
@@ -294,7 +294,7 @@ class PhpLexer(RegexLexer):
self._functions = set()
if self.funcnamehighlighting:
from pygments.lexers._php_builtins import MODULES
- for key, value in MODULES.items():
+ for key, value in MODULES.items():
if key not in self.disabledmodules:
self._functions.update(value)
RegexLexer.__init__(self, **options)
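
The PsyshConsoleLexer restored above splits the '>>> ' and '... ' prompts off as Generic.Prompt tokens and hands the remaining PHP payload to a PhpLexer via do_insertions. A short sketch of that behaviour on a transcript; the session text below is invented for illustration.

    # Illustrative only: run the PsySH console lexer over an invented transcript.
    # Prompt prefixes become Generic.Prompt; the PHP code is delegated to PhpLexer.
    from pygments.lexers.php import PsyshConsoleLexer

    session = (
        '>>> $x = 21 * 2\n'
        '=> 42\n'
    )

    for token_type, value in PsyshConsoleLexer().get_tokens(session):
        print(token_type, repr(value))
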
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pointless.py b/contrib/python/Pygments/py3/pygments/lexers/pointless.py
index c340107374..397de3a2c6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pointless.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pointless.py
@@ -1,70 +1,70 @@
-"""
- pygments.lexers.pointless
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Pointless.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
- Punctuation, String, Text
-
-__all__ = ['PointlessLexer']
-
-
-class PointlessLexer(RegexLexer):
- """
- For `Pointless <https://ptls.dev>`_ source code.
-
- .. versionadded:: 2.7
- """
-
- name = 'Pointless'
- aliases = ['pointless']
- filenames = ['*.ptls']
-
- ops = words([
- "+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
- "/=", "**=", "%=", "|>", "=", "==", "!=", "<", ">",
- "<=", ">=", "=>", "$", "++",
- ])
-
- keywords = words([
- "if", "then", "else", "where", "with", "cond",
- "case", "and", "or", "not", "in", "as", "for",
- "requires", "throw", "try", "catch", "when",
- "yield", "upval",
- ], suffix=r'\b')
-
- tokens = {
- 'root': [
- (r'[ \n\r]+', Text),
- (r'--.*$', Comment.Single),
- (r'"""', String, 'multiString'),
- (r'"', String, 'string'),
- (r'[\[\](){}:;,.]', Punctuation),
- (ops, Operator),
- (keywords, Keyword),
- (r'\d+|\d*\.\d+', Number),
- (r'(true|false)\b', Name.Builtin),
- (r'[A-Z][a-zA-Z0-9]*\b', String.Symbol),
- (r'output\b', Name.Variable.Magic),
- (r'(export|import)\b', Keyword.Namespace),
- (r'[a-z][a-zA-Z0-9]*\b', Name.Variable)
- ],
- 'multiString': [
- (r'\\.', String.Escape),
- (r'"""', String, '#pop'),
- (r'"', String),
- (r'[^\\"]+', String),
- ],
- 'string': [
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', Error),
- (r'[^\\"]+', String),
- ],
- }
+"""
+ pygments.lexers.pointless
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Pointless.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text
+
+__all__ = ['PointlessLexer']
+
+
+class PointlessLexer(RegexLexer):
+ """
+ For `Pointless <https://ptls.dev>`_ source code.
+
+ .. versionadded:: 2.7
+ """
+
+ name = 'Pointless'
+ aliases = ['pointless']
+ filenames = ['*.ptls']
+
+ ops = words([
+ "+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
+ "/=", "**=", "%=", "|>", "=", "==", "!=", "<", ">",
+ "<=", ">=", "=>", "$", "++",
+ ])
+
+ keywords = words([
+ "if", "then", "else", "where", "with", "cond",
+ "case", "and", "or", "not", "in", "as", "for",
+ "requires", "throw", "try", "catch", "when",
+ "yield", "upval",
+ ], suffix=r'\b')
+
+ tokens = {
+ 'root': [
+ (r'[ \n\r]+', Text),
+ (r'--.*$', Comment.Single),
+ (r'"""', String, 'multiString'),
+ (r'"', String, 'string'),
+ (r'[\[\](){}:;,.]', Punctuation),
+ (ops, Operator),
+ (keywords, Keyword),
+ (r'\d+|\d*\.\d+', Number),
+ (r'(true|false)\b', Name.Builtin),
+ (r'[A-Z][a-zA-Z0-9]*\b', String.Symbol),
+ (r'output\b', Name.Variable.Magic),
+ (r'(export|import)\b', Keyword.Namespace),
+ (r'[a-z][a-zA-Z0-9]*\b', Name.Variable)
+ ],
+ 'multiString': [
+ (r'\\.', String.Escape),
+ (r'"""', String, '#pop'),
+ (r'"', String),
+ (r'[^\\"]+', String),
+ ],
+ 'string': [
+ (r'\\.', String.Escape),
+ (r'"', String, '#pop'),
+ (r'\n', Error),
+ (r'[^\\"]+', String),
+ ],
+ }
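
The PointlessLexer above is moved verbatim; its grammar is small enough that a quick tokenisation pass exercises most of its states. The Pointless snippet below is made up for illustration.

    # Illustrative only: tokenize a tiny, invented Pointless program and count
    # the token types produced by the rules above.
    from collections import Counter

    from pygments.lexers.pointless import PointlessLexer

    code = 'output = "hello" |> println  -- greet\n'

    counts = Counter(str(tok) for tok, _ in PointlessLexer().get_tokens(code))
    # Expect e.g. Name.Variable.Magic for 'output' and Comment.Single for '-- greet'.
    print(counts)
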
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pony.py b/contrib/python/Pygments/py3/pygments/lexers/pony.py
index 0cd5dbd3df..bfac8810ec 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pony.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pony.py
@@ -4,7 +4,7 @@
Lexers for Pony and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -69,7 +69,7 @@ class PonyLexer(RegexLexer):
(r'\d+', Number.Integer),
(r'(true|false)\b', Name.Builtin),
(r'_\d*', Name),
- (r'_?[a-z][\w\']*', Name)
+ (r'_?[a-z][\w\']*', Name)
],
'typename': [
(_caps + r'?((?:\s)*)(_?[A-Z]\w*)',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/praat.py b/contrib/python/Pygments/py3/pygments/lexers/praat.py
index 8fbae8c520..f2a4b5286b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/praat.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/praat.py
@@ -4,7 +4,7 @@
Lexer for Praat
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -54,7 +54,7 @@ class PraatLexer(RegexLexer):
'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
- 'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
+ 'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
@@ -62,9 +62,9 @@ class PraatLexer(RegexLexer):
'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
- 'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
+ 'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
- 'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
+ 'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
'writeInfo', 'writeInfoLine',
)
@@ -89,9 +89,9 @@ class PraatLexer(RegexLexer):
'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
- 'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
- 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
- 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
+ 'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
+ 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
+ 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
@@ -111,10 +111,10 @@ class PraatLexer(RegexLexer):
'defaultDirectory',
)
- object_attributes = (
- 'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
- )
-
+ object_attributes = (
+ 'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
+ )
+
tokens = {
'root': [
(r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
@@ -151,9 +151,9 @@ class PraatLexer(RegexLexer):
],
'command': [
(r'( ?[\w()-]+ ?)', Keyword),
-
- include('string_interpolated'),
-
+
+ include('string_interpolated'),
+
(r'\.{3}', Keyword, ('#pop', 'old_arguments')),
(r':', Keyword, ('#pop', 'comma_list')),
(r'\s', Text, '#pop'),
@@ -212,39 +212,39 @@ class PraatLexer(RegexLexer):
(r'\n', Text, '#pop'),
(r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
],
- 'object_reference': [
- include('string_interpolated'),
- (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
-
- (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
-
- (r'\$', Name.Builtin),
- (r'\[', Text, '#pop'),
+ 'object_reference': [
+ include('string_interpolated'),
+ (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
+
+ (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
+
+ (r'\$', Name.Builtin),
+ (r'\[', Text, '#pop'),
],
'variable_name': [
include('operator'),
include('number'),
(words(variables_string, suffix=r'\$'), Name.Variable.Global),
- (words(variables_numeric,
- suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
- Name.Variable.Global),
+ (words(variables_numeric,
+ suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
+ Name.Variable.Global),
- (words(objects, prefix=r'\b', suffix=r"(_)"),
- bygroups(Name.Builtin, Name.Builtin),
- 'object_reference'),
+ (words(objects, prefix=r'\b', suffix=r"(_)"),
+ bygroups(Name.Builtin, Name.Builtin),
+ 'object_reference'),
(r'\.?_?[a-z][\w.]*(\$|#)?', Text),
(r'[\[\]]', Punctuation, 'comma_list'),
-
- include('string_interpolated'),
+
+ include('string_interpolated'),
],
'operator': [
(r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
],
'string_interpolated': [
- (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w,]+")\])?(:[0-9]+)?\'',
+ (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w,]+")\])?(:[0-9]+)?\'',
String.Interpol),
],
'string_unquoted': [
@@ -252,9 +252,9 @@ class PraatLexer(RegexLexer):
(r'\n', Text, '#pop'),
(r'\s', Text),
-
- include('string_interpolated'),
-
+
+ include('string_interpolated'),
+
(r"'", String),
(r"[^'\n]+", String),
],
@@ -262,14 +262,14 @@ class PraatLexer(RegexLexer):
(r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'"', String, '#pop'),
-
- include('string_interpolated'),
-
+
+ include('string_interpolated'),
+
(r"'", String),
(r'[^\'"\n]+', String),
],
'old_form': [
- (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
+ (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
(r'\s+', Text),
(r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/procfile.py b/contrib/python/Pygments/py3/pygments/lexers/procfile.py
index cac0a25440..9856f919a4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/procfile.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/procfile.py
@@ -1,43 +1,43 @@
-"""
- pygments.lexers.procfile
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Procfile file format.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Name, Number, String, Text, Punctuation
-
-__all__ = ["ProcfileLexer"]
-
-
-class ProcfileLexer(RegexLexer):
- """
- Lexer for Procfile file format.
-
- The format is used to run processes on Heroku or is used by Foreman or
- Honcho tools.
- For more information about the definition of the format, see:
- https://devcenter.heroku.com/articles/procfile#procfile-format
-
- .. versionadded:: 2.10
- """
- name = 'Procfile'
- aliases = ['procfile']
- filenames = ['Procfile']
-
- tokens = {
- 'root': [
- (r'^([a-z]+)(:)', bygroups(Name.Label, Punctuation)),
- (r'\s+', Text.Whitespace),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- (r'[0-9]+', Number.Integer),
- (r'\$[a-zA-Z_][\w]*', Name.Variable),
- (r'(\w+)(=)(\w+)', bygroups(Name.Variable, Punctuation, String)),
- (r'([\w\-\./]+)', Text),
- ],
- }
+"""
+ pygments.lexers.procfile
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Procfile file format.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Name, Number, String, Text, Punctuation
+
+__all__ = ["ProcfileLexer"]
+
+
+class ProcfileLexer(RegexLexer):
+ """
+ Lexer for Procfile file format.
+
+ The format is used to run processes on Heroku or is used by Foreman or
+ Honcho tools.
+ For more information about the definition of the format, see:
+ https://devcenter.heroku.com/articles/procfile#procfile-format
+
+ .. versionadded:: 2.10
+ """
+ name = 'Procfile'
+ aliases = ['procfile']
+ filenames = ['Procfile']
+
+ tokens = {
+ 'root': [
+ (r'^([a-z]+)(:)', bygroups(Name.Label, Punctuation)),
+ (r'\s+', Text.Whitespace),
+ (r'"[^"]*"', String),
+ (r"'[^']*'", String),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[a-zA-Z_][\w]*', Name.Variable),
+ (r'(\w+)(=)(\w+)', bygroups(Name.Variable, Punctuation, String)),
+ (r'([\w\-\./]+)', Text),
+ ],
+ }
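
As the docstring above notes, a Procfile is just a list of "name: command" lines, so the whole lexer fits in a single root state. A short sketch of it in use; the Procfile content is an invented example in the spirit of the Heroku documentation the docstring links to.

    # Illustrative only: highlight an invented Procfile as HTML.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.procfile import ProcfileLexer

    procfile = (
        'web: gunicorn app:server --workers 3\n'
        'worker: python worker.py QUEUE=default\n'
    )

    # 'web'/'worker' match the Name.Label rule; 'QUEUE=default' matches the
    # bygroups(Name.Variable, Punctuation, String) rule.
    print(highlight(procfile, ProcfileLexer(), HtmlFormatter()))
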
diff --git a/contrib/python/Pygments/py3/pygments/lexers/prolog.py b/contrib/python/Pygments/py3/pygments/lexers/prolog.py
index 21c813625d..9c198f9ab2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/prolog.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/prolog.py
@@ -4,7 +4,7 @@
Lexers for Prolog and Prolog-like languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -54,22 +54,22 @@ class PrologLexer(RegexLexer):
(r'(mod|div|not)\b', Operator),
(r'_', Keyword), # The don't-care variable
(r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
- (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- r'(\s*)(:-|-->)',
+ (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ r'(\s*)(:-|-->)',
bygroups(Name.Function, Text, Operator)), # function defn
- (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- r'(\s*)(\()',
+ (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ r'(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
+ (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
String.Atom), # atom, characters
# This one includes !
- (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
+ (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
String.Atom), # atom, graphics
(r'[A-Z_]\w*', Name.Variable),
- (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
+ (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
],
'nested-comment': [
(r'\*/', Comment.Multiline, '#pop'),
@@ -106,19 +106,19 @@ class LogtalkLexer(RegexLexer):
(r'\n', Text),
(r'\s+', Text),
# Numbers
- (r"0'[\\]?.", Number),
+ (r"0'[\\]?.", Number),
(r'0b[01]+', Number.Bin),
(r'0o[0-7]+', Number.Oct),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
# Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
# Event handlers
(r'(after|before)(?=[(])', Keyword),
# Message forwarding handler
(r'forward(?=[(])', Keyword),
# Execution-context methods
- (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
+ (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
# Reflection
(r'(current_predicate|predicate_property)(?=[(])', Keyword),
# DCGs and term expansion
@@ -134,23 +134,23 @@ class LogtalkLexer(RegexLexer):
# Events
(r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
# Flags
- (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
+ (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
# Compiling, loading, and library paths
- (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
+ (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
(r'\blogtalk_make\b', Keyword),
# Database
(r'(clause|retract(all)?)(?=[(])', Keyword),
(r'a(bolish|ssert(a|z))(?=[(])', Keyword),
# Control constructs
(r'(ca(ll|tch)|throw)(?=[(])', Keyword),
- (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
- (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
+ (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
+ (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
# All solutions
(r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
- # Multi-threading predicates
- (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
- # Engine predicates
- (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
+ # Multi-threading predicates
+ (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Engine predicates
+ (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
# Term unification
(r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
# Term creation and decomposition
@@ -162,7 +162,7 @@ class LogtalkLexer(RegexLexer):
# Other arithmetic functors
(r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
# Term testing
- (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
+ (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
# Term comparison
(r'compare(?=[(])', Keyword),
# Stream selection and control
@@ -226,11 +226,11 @@ class LogtalkLexer(RegexLexer):
# Existential quantifier
(r'\^', Operator),
# Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Punctuation
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ # Punctuation
(r'[()\[\],.|]', Text),
# Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[a-z][a-zA-Z0-9_]*", Text),
(r"'", String, 'quoted_atom'),
],
@@ -245,43 +245,43 @@ class LogtalkLexer(RegexLexer):
'directive': [
# Conditional compilation directives
(r'(el)?if(?=[(])', Keyword, 'root'),
- (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
+ (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
# Entity directives
(r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
- (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
+ (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
# Predicate scope directives
(r'(public|protected|private)(?=[(])', Keyword, 'root'),
# Other directives
(r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
(r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
- (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
- (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+ (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
+ (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
(r'op(?=[(])', Keyword, 'root'),
(r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
],
'entityrelations': [
(r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
# Numbers
- (r"0'[\\]?.", Number),
+ (r"0'[\\]?.", Number),
(r'0b[01]+', Number.Bin),
(r'0o[0-7]+', Number.Oct),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
# Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
# Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[a-z][a-zA-Z0-9_]*", Text),
(r"'", String, 'quoted_atom'),
# Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# End of entity-opening directive
(r'([)]\.)', Text, 'root'),
# Scope operator
(r'(::)', Operator),
- # Punctuation
+ # Punctuation
(r'[()\[\],.|]', Text),
# Comments
(r'%.*?\n', Comment),
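
The Logtalk hunks above only re-indent keyword rules (multi-threading and engine predicates, directives, entity relations); behaviour is unchanged. A minimal sketch of the lexer applied to a small, invented Logtalk object follows.

    # Illustrative only: tokenize an invented Logtalk object declaration.
    from pygments.lexers.prolog import LogtalkLexer

    source = (
        ':- object(hello).\n'
        '    :- public(greet/0).\n'
        '    greet :- write(hello), nl.\n'
        ':- end_object.\n'
    )

    for token_type, value in LogtalkLexer().get_tokens(source):
        if value.strip():              # skip pure whitespace tokens
            print(token_type, repr(value))
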
diff --git a/contrib/python/Pygments/py3/pygments/lexers/promql.py b/contrib/python/Pygments/py3/pygments/lexers/promql.py
index b9646d4640..3515aac054 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/promql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/promql.py
@@ -1,182 +1,182 @@
-"""
- pygments.lexers.promql
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Prometheus Query Language.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, default, words
-from pygments.token import (
- Comment,
- Keyword,
- Name,
- Number,
- Operator,
- Punctuation,
- String,
- Whitespace,
-)
-
-__all__ = ["PromQLLexer"]
-
-
-class PromQLLexer(RegexLexer):
- """
- For `PromQL <https://prometheus.io/docs/prometheus/latest/querying/basics/>`_ queries.
-
- For details about the grammar see:
- https://github.com/prometheus/prometheus/tree/master/promql/parser
-
- .. versionadded: 2.7
- """
-
- name = "PromQL"
- aliases = ["promql"]
- filenames = ["*.promql"]
-
- base_keywords = (
- words(
- (
- "bool",
- "by",
- "group_left",
- "group_right",
- "ignoring",
- "offset",
- "on",
- "without",
- ),
- suffix=r"\b",
- ),
- Keyword,
- )
-
- aggregator_keywords = (
- words(
- (
- "sum",
- "min",
- "max",
- "avg",
- "group",
- "stddev",
- "stdvar",
- "count",
- "count_values",
- "bottomk",
- "topk",
- "quantile",
- ),
- suffix=r"\b",
- ),
- Keyword,
- )
-
- function_keywords = (
- words(
- (
- "abs",
- "absent",
- "absent_over_time",
- "avg_over_time",
- "ceil",
- "changes",
- "clamp_max",
- "clamp_min",
- "count_over_time",
- "day_of_month",
- "day_of_week",
- "days_in_month",
- "delta",
- "deriv",
- "exp",
- "floor",
- "histogram_quantile",
- "holt_winters",
- "hour",
- "idelta",
- "increase",
- "irate",
- "label_join",
- "label_replace",
- "ln",
- "log10",
- "log2",
- "max_over_time",
- "min_over_time",
- "minute",
- "month",
- "predict_linear",
- "quantile_over_time",
- "rate",
- "resets",
- "round",
- "scalar",
- "sort",
- "sort_desc",
- "sqrt",
- "stddev_over_time",
- "stdvar_over_time",
- "sum_over_time",
- "time",
- "timestamp",
- "vector",
- "year",
- ),
- suffix=r"\b",
- ),
- Keyword.Reserved,
- )
-
- tokens = {
- "root": [
- (r"\n", Whitespace),
- (r"\s+", Whitespace),
- (r",", Punctuation),
- # Keywords
- base_keywords,
- aggregator_keywords,
- function_keywords,
- # Offsets
- (r"[1-9][0-9]*[smhdwy]", String),
- # Numbers
- (r"-?[0-9]+\.[0-9]+", Number.Float),
- (r"-?[0-9]+", Number.Integer),
- # Comments
- (r"#.*?$", Comment.Single),
- # Operators
- (r"(\+|\-|\*|\/|\%|\^)", Operator),
- (r"==|!=|>=|<=|<|>", Operator),
- (r"and|or|unless", Operator.Word),
- # Metrics
- (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
- # Params
- (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
- # Other states
- (r"\(", Operator, "function"),
- (r"\)", Operator),
- (r"\{", Punctuation, "labels"),
- (r"\[", Punctuation, "range"),
- ],
- "labels": [
- (r"\}", Punctuation, "#pop"),
- (r"\n", Whitespace),
- (r"\s+", Whitespace),
- (r",", Punctuation),
- (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|\')(.*?)("|\')',
- bygroups(Name.Label, Whitespace, Operator, Whitespace,
- Punctuation, String, Punctuation)),
- ],
- "range": [
- (r"\]", Punctuation, "#pop"),
- (r"[1-9][0-9]*[smhdwy]", String),
- ],
- "function": [
- (r"\)", Operator, "#pop"),
- (r"\(", Operator, "#push"),
- default("#pop"),
- ],
- }
+"""
+ pygments.lexers.promql
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Prometheus Query Language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import (
+ Comment,
+ Keyword,
+ Name,
+ Number,
+ Operator,
+ Punctuation,
+ String,
+ Whitespace,
+)
+
+__all__ = ["PromQLLexer"]
+
+
+class PromQLLexer(RegexLexer):
+ """
+ For `PromQL <https://prometheus.io/docs/prometheus/latest/querying/basics/>`_ queries.
+
+ For details about the grammar see:
+ https://github.com/prometheus/prometheus/tree/master/promql/parser
+
+ .. versionadded: 2.7
+ """
+
+ name = "PromQL"
+ aliases = ["promql"]
+ filenames = ["*.promql"]
+
+ base_keywords = (
+ words(
+ (
+ "bool",
+ "by",
+ "group_left",
+ "group_right",
+ "ignoring",
+ "offset",
+ "on",
+ "without",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ )
+
+ aggregator_keywords = (
+ words(
+ (
+ "sum",
+ "min",
+ "max",
+ "avg",
+ "group",
+ "stddev",
+ "stdvar",
+ "count",
+ "count_values",
+ "bottomk",
+ "topk",
+ "quantile",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ )
+
+ function_keywords = (
+ words(
+ (
+ "abs",
+ "absent",
+ "absent_over_time",
+ "avg_over_time",
+ "ceil",
+ "changes",
+ "clamp_max",
+ "clamp_min",
+ "count_over_time",
+ "day_of_month",
+ "day_of_week",
+ "days_in_month",
+ "delta",
+ "deriv",
+ "exp",
+ "floor",
+ "histogram_quantile",
+ "holt_winters",
+ "hour",
+ "idelta",
+ "increase",
+ "irate",
+ "label_join",
+ "label_replace",
+ "ln",
+ "log10",
+ "log2",
+ "max_over_time",
+ "min_over_time",
+ "minute",
+ "month",
+ "predict_linear",
+ "quantile_over_time",
+ "rate",
+ "resets",
+ "round",
+ "scalar",
+ "sort",
+ "sort_desc",
+ "sqrt",
+ "stddev_over_time",
+ "stdvar_over_time",
+ "sum_over_time",
+ "time",
+ "timestamp",
+ "vector",
+ "year",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword.Reserved,
+ )
+
+ tokens = {
+ "root": [
+ (r"\n", Whitespace),
+ (r"\s+", Whitespace),
+ (r",", Punctuation),
+ # Keywords
+ base_keywords,
+ aggregator_keywords,
+ function_keywords,
+ # Offsets
+ (r"[1-9][0-9]*[smhdwy]", String),
+ # Numbers
+ (r"-?[0-9]+\.[0-9]+", Number.Float),
+ (r"-?[0-9]+", Number.Integer),
+ # Comments
+ (r"#.*?$", Comment.Single),
+ # Operators
+ (r"(\+|\-|\*|\/|\%|\^)", Operator),
+ (r"==|!=|>=|<=|<|>", Operator),
+ (r"and|or|unless", Operator.Word),
+ # Metrics
+ (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
+ # Params
+ (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
+ # Other states
+ (r"\(", Operator, "function"),
+ (r"\)", Operator),
+ (r"\{", Punctuation, "labels"),
+ (r"\[", Punctuation, "range"),
+ ],
+ "labels": [
+ (r"\}", Punctuation, "#pop"),
+ (r"\n", Whitespace),
+ (r"\s+", Whitespace),
+ (r",", Punctuation),
+ (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|\')(.*?)("|\')',
+ bygroups(Name.Label, Whitespace, Operator, Whitespace,
+ Punctuation, String, Punctuation)),
+ ],
+ "range": [
+ (r"\]", Punctuation, "#pop"),
+ (r"[1-9][0-9]*[smhdwy]", String),
+ ],
+ "function": [
+ (r"\)", Operator, "#pop"),
+ (r"\(", Operator, "#push"),
+ default("#pop"),
+ ],
+ }
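
The PromQL lexer above keeps separate states for label matchers ({...}), range selectors ([...]) and nested function calls. A quick sketch of how a typical query is split up; the query itself is a generic example, not taken from the file.

    # Illustrative only: tokenize a generic PromQL query with the lexer above.
    from pygments.lexers.promql import PromQLLexer

    query = 'sum(rate(http_requests_total{job="api", code!="500"}[5m])) by (job)\n'

    # 'sum' hits aggregator_keywords, 'rate' function_keywords, '[5m]' the
    # "range" state, and the label matchers the "labels" state.
    for token_type, value in PromQLLexer().get_tokens(query):
        print(token_type, repr(value))
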
diff --git a/contrib/python/Pygments/py3/pygments/lexers/python.py b/contrib/python/Pygments/py3/pygments/lexers/python.py
index 2901d7b982..0d9478dc8d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/python.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/python.py
@@ -4,7 +4,7 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,196 +19,196 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
from pygments import unistring as uni
__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
- 'Python2Lexer', 'Python2TracebackLexer',
- 'CythonLexer', 'DgLexer', 'NumPyLexer']
+ 'Python2Lexer', 'Python2TracebackLexer',
+ 'CythonLexer', 'DgLexer', 'NumPyLexer']
line_re = re.compile('.*?\n')
class PythonLexer(RegexLexer):
"""
- For `Python <http://www.python.org>`_ source code (version 3.x).
-
- .. versionadded:: 0.10
-
- .. versionchanged:: 2.5
- This is now the default ``PythonLexer``. It is still available as the
- alias ``Python3Lexer``.
+ For `Python <http://www.python.org>`_ source code (version 3.x).
+
+ .. versionadded:: 0.10
+
+ .. versionchanged:: 2.5
+ This is now the default ``PythonLexer``. It is still available as the
+ alias ``Python3Lexer``.
"""
name = 'Python'
- aliases = ['python', 'py', 'sage', 'python3', 'py3']
- filenames = [
- '*.py',
- '*.pyw',
- # Jython
- '*.jy',
- # Sage
- '*.sage',
- # SCons
- '*.sc',
- 'SConstruct',
- 'SConscript',
- # Skylark/Starlark (used by Bazel, Buck, and Pants)
- '*.bzl',
- 'BUCK',
- 'BUILD',
- 'BUILD.bazel',
- 'WORKSPACE',
- # Twisted Application infrastructure
- '*.tac',
- ]
- mimetypes = ['text/x-python', 'application/x-python',
- 'text/x-python3', 'application/x-python3']
-
- flags = re.MULTILINE | re.UNICODE
-
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
+ aliases = ['python', 'py', 'sage', 'python3', 'py3']
+ filenames = [
+ '*.py',
+ '*.pyw',
+ # Jython
+ '*.jy',
+ # Sage
+ '*.sage',
+ # SCons
+ '*.sc',
+ 'SConstruct',
+ 'SConscript',
+ # Skylark/Starlark (used by Bazel, Buck, and Pants)
+ '*.bzl',
+ 'BUCK',
+ 'BUILD',
+ 'BUILD.bazel',
+ 'WORKSPACE',
+ # Twisted Application infrastructure
+ '*.tac',
+ ]
+ mimetypes = ['text/x-python', 'application/x-python',
+ 'text/x-python3', 'application/x-python3']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+
def innerstring_rules(ttype):
return [
- # the old style '%s' % (...) string formatting (still valid in Py3)
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
- # the new style '{}'.format(...) string formatting
- (r'\{'
- r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
- r'(\![sra])?' # conversion
- r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
- r'\}', String.Interpol),
-
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%{\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%|(\{{1,2})', ttype)
- # newlines are an error (use "nl" state)
- ]
-
- def fstring_rules(ttype):
- return [
- # Assuming that a '}' is the closing brace after format specifier.
- # Sadly, this means that we won't detect syntax error. But it's
- # more important to parse correct syntax correctly, than to
- # highlight invalid syntax.
- (r'\}', String.Interpol),
- (r'\{', String.Interpol, 'expr-inside-fstring'),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"{}\n]+', ttype),
- (r'[\'"\\]', ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
- bygroups(Text, String.Affix, String.Doc)),
- (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
- bygroups(Text, String.Affix, String.Doc)),
- (r'\A#!.+$', Comment.Hashbang),
- (r'#.*$', Comment.Single),
- (r'\\\n', Text),
- (r'\\', Text),
- include('keywords'),
+ # the old style '%s' % (...) string formatting (still valid in Py3)
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
+ # the new style '{}'.format(...) string formatting
+ (r'\{'
+ r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+ r'(\![sra])?' # conversion
+ r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+ r'\}', String.Interpol),
+
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%{\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r'%|(\{{1,2})', ttype)
+ # newlines are an error (use "nl" state)
+ ]
+
+ def fstring_rules(ttype):
+ return [
+ # Assuming that a '}' is the closing brace after format specifier.
+ # Sadly, this means that we won't detect syntax error. But it's
+ # more important to parse correct syntax correctly, than to
+ # highlight invalid syntax.
+ (r'\}', String.Interpol),
+ (r'\{', String.Interpol, 'expr-inside-fstring'),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"{}\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+ bygroups(Text, String.Affix, String.Doc)),
+ (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+ bygroups(Text, String.Affix, String.Doc)),
+ (r'\A#!.+$', Comment.Hashbang),
+ (r'#.*$', Comment.Single),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ include('keywords'),
include('soft-keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('expr'),
- ],
- 'expr': [
- # raw f-strings
- ('(?i)(rf|fr)(""")',
- bygroups(String.Affix, String.Double),
- combined('rfstringescape', 'tdqf')),
- ("(?i)(rf|fr)(''')",
- bygroups(String.Affix, String.Single),
- combined('rfstringescape', 'tsqf')),
- ('(?i)(rf|fr)(")',
- bygroups(String.Affix, String.Double),
- combined('rfstringescape', 'dqf')),
- ("(?i)(rf|fr)(')",
- bygroups(String.Affix, String.Single),
- combined('rfstringescape', 'sqf')),
- # non-raw f-strings
- ('([fF])(""")', bygroups(String.Affix, String.Double),
- combined('fstringescape', 'tdqf')),
- ("([fF])(''')", bygroups(String.Affix, String.Single),
- combined('fstringescape', 'tsqf')),
- ('([fF])(")', bygroups(String.Affix, String.Double),
- combined('fstringescape', 'dqf')),
- ("([fF])(')", bygroups(String.Affix, String.Single),
- combined('fstringescape', 'sqf')),
- # raw strings
- ('(?i)(rb|br|r)(""")',
- bygroups(String.Affix, String.Double), 'tdqs'),
- ("(?i)(rb|br|r)(''')",
- bygroups(String.Affix, String.Single), 'tsqs'),
- ('(?i)(rb|br|r)(")',
- bygroups(String.Affix, String.Double), 'dqs'),
- ("(?i)(rb|br|r)(')",
- bygroups(String.Affix, String.Single), 'sqs'),
- # non-raw strings
- ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'tdqs')),
- ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'tsqs')),
- ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
- combined('stringescape', 'dqs')),
- ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
- combined('stringescape', 'sqs')),
- (r'[^\S\n]+', Text),
- include('numbers'),
- (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
- (r'[]{}:(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- include('expr-keywords'),
- include('builtins'),
- include('magicfuncs'),
- include('magicvars'),
- include('name'),
- ],
- 'expr-inside-fstring': [
- (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
- # without format specifier
- (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
- r'(\![sraf])?' # conversion
- r'\}', String.Interpol, '#pop'),
- # with format specifier
- # we'll catch the remaining '}' in the outer scope
- (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
- r'(\![sraf])?' # conversion
- r':', String.Interpol, '#pop'),
- (r'\s+', Text), # allow new lines
- include('expr'),
- ],
- 'expr-inside-fstring-inner': [
- (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
- (r'[])}]', Punctuation, '#pop'),
- (r'\s+', Text), # allow new lines
- include('expr'),
- ],
- 'expr-keywords': [
- # Based on https://docs.python.org/3/reference/expressions.html
- (words((
- 'async for', 'await', 'else', 'for', 'if', 'lambda',
- 'yield', 'yield from'), suffix=r'\b'),
- Keyword),
- (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
- ],
- 'keywords': [
- (words((
- 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
- 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
- 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
- 'yield from', 'as', 'with'), suffix=r'\b'),
- Keyword),
- (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
- ],
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
+ include('expr'),
+ ],
+ 'expr': [
+ # raw f-strings
+ ('(?i)(rf|fr)(""")',
+ bygroups(String.Affix, String.Double),
+ combined('rfstringescape', 'tdqf')),
+ ("(?i)(rf|fr)(''')",
+ bygroups(String.Affix, String.Single),
+ combined('rfstringescape', 'tsqf')),
+ ('(?i)(rf|fr)(")',
+ bygroups(String.Affix, String.Double),
+ combined('rfstringescape', 'dqf')),
+ ("(?i)(rf|fr)(')",
+ bygroups(String.Affix, String.Single),
+ combined('rfstringescape', 'sqf')),
+ # non-raw f-strings
+ ('([fF])(""")', bygroups(String.Affix, String.Double),
+ combined('fstringescape', 'tdqf')),
+ ("([fF])(''')", bygroups(String.Affix, String.Single),
+ combined('fstringescape', 'tsqf')),
+ ('([fF])(")', bygroups(String.Affix, String.Double),
+ combined('fstringescape', 'dqf')),
+ ("([fF])(')", bygroups(String.Affix, String.Single),
+ combined('fstringescape', 'sqf')),
+ # raw strings
+ ('(?i)(rb|br|r)(""")',
+ bygroups(String.Affix, String.Double), 'tdqs'),
+ ("(?i)(rb|br|r)(''')",
+ bygroups(String.Affix, String.Single), 'tsqs'),
+ ('(?i)(rb|br|r)(")',
+ bygroups(String.Affix, String.Double), 'dqs'),
+ ("(?i)(rb|br|r)(')",
+ bygroups(String.Affix, String.Single), 'sqs'),
+ # non-raw strings
+ ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'tdqs')),
+ ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'tsqs')),
+ ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'dqs')),
+ ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'sqs')),
+ (r'[^\S\n]+', Text),
+ include('numbers'),
+ (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ include('expr-keywords'),
+ include('builtins'),
+ include('magicfuncs'),
+ include('magicvars'),
+ include('name'),
+ ],
+ 'expr-inside-fstring': [
+ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+ # without format specifier
+ (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
+ r'(\![sraf])?' # conversion
+ r'\}', String.Interpol, '#pop'),
+ # with format specifier
+ # we'll catch the remaining '}' in the outer scope
+ (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
+ r'(\![sraf])?' # conversion
+ r':', String.Interpol, '#pop'),
+ (r'\s+', Text), # allow new lines
+ include('expr'),
+ ],
+ 'expr-inside-fstring-inner': [
+ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+ (r'[])}]', Punctuation, '#pop'),
+ (r'\s+', Text), # allow new lines
+ include('expr'),
+ ],
+ 'expr-keywords': [
+ # Based on https://docs.python.org/3/reference/expressions.html
+ (words((
+ 'async for', 'await', 'else', 'for', 'if', 'lambda',
+ 'yield', 'yield from'), suffix=r'\b'),
+ Keyword),
+ (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+ 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
+ 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+ ],
'soft-keywords': [
# `match`, `case` and `_` soft keywords
(r'(^[ \t]*)' # at beginning of line + possible indentation
@@ -223,201 +223,201 @@ class PythonLexer(RegexLexer):
(r'(\s+)([^\n_]*)(_\b)', bygroups(Text, using(this), Keyword)),
default('#pop')
],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray',
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray',
'breakpoint', 'bytes', 'chr', 'classmethod', 'compile', 'complex',
- 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter',
- 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr',
- 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass',
- 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview',
- 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',
- 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',
- 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',
- 'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
- 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
- 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
- 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
- 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
- 'NotImplementedError', 'OSError', 'OverflowError',
- 'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
- 'RuntimeError', 'RuntimeWarning', 'StopIteration',
- 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
- 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
- 'Warning', 'WindowsError', 'ZeroDivisionError',
- # new builtin exceptions from PEP 3151
- 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
- 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
- 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
- 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
- 'PermissionError', 'ProcessLookupError', 'TimeoutError',
- # others new in Python 3
- 'StopAsyncIteration', 'ModuleNotFoundError', 'RecursionError'),
- prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'magicfuncs': [
- (words((
- '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
- '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
- '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
- '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
- '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
- '__ge__', '__get__', '__getattr__', '__getattribute__',
- '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
- '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
- '__imul__', '__index__', '__init__', '__instancecheck__',
- '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
- '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
- '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
- '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
- '__new__', '__next__', '__or__', '__pos__', '__pow__',
- '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
- '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
- '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
- '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
- '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
- '__sub__', '__subclasscheck__', '__truediv__',
- '__xor__'), suffix=r'\b'),
- Name.Function.Magic),
- ],
- 'magicvars': [
- (words((
- '__annotations__', '__bases__', '__class__', '__closure__',
- '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
- '__func__', '__globals__', '__kwdefaults__', '__module__',
- '__mro__', '__name__', '__objclass__', '__qualname__',
- '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
- Name.Variable.Magic),
- ],
- 'numbers': [
- (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
- r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
- (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
- (r'0[oO](?:_?[0-7])+', Number.Oct),
- (r'0[bB](?:_?[01])+', Number.Bin),
- (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
- (r'\d(?:_?\d)*', Number.Integer),
- ],
- 'name': [
- (r'@' + uni_name, Name.Decorator),
- (r'@', Operator), # new matrix multiplication operator
- (uni_name, Name),
- ],
- 'funcname': [
- include('magicfuncs'),
- (uni_name, Name.Function, '#pop'),
- default('#pop'),
- ],
- 'classname': [
- (uni_name, Name.Class, '#pop'),
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
- (r'\.', Name.Namespace),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- (uni_name, Name.Namespace),
- default('#pop'),
- ],
- 'rfstringescape': [
- (r'\{\{', String.Escape),
- (r'\}\}', String.Escape),
- ],
- 'fstringescape': [
- include('rfstringescape'),
- include('stringescape'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'fstrings-single': fstring_rules(String.Single),
- 'fstrings-double': fstring_rules(String.Double),
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqf': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('fstrings-double')
- ],
- 'sqf': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('fstrings-single')
- ],
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqf': [
- (r'"""', String.Double, '#pop'),
- include('fstrings-double'),
- (r'\n', String.Double)
- ],
- 'tsqf': [
- (r"'''", String.Single, '#pop'),
- include('fstrings-single'),
- (r'\n', String.Single)
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?(3(\.\d)?)?') or \
- 'import ' in text[:1000]
-
-
-Python3Lexer = PythonLexer
-
-
-class Python2Lexer(RegexLexer):
- """
- For `Python 2.x <http://www.python.org>`_ source code.
-
- .. versionchanged:: 2.5
- This class has been renamed from ``PythonLexer``. ``PythonLexer`` now
- refers to the Python 3 variant. File name patterns like ``*.py`` have
- been moved to Python 3 as well.
- """
-
- name = 'Python 2.x'
- aliases = ['python2', 'py2']
- filenames = [] # now taken over by PythonLexer (3.x)
- mimetypes = ['text/x-python2', 'application/x-python2']
-
- def innerstring_rules(ttype):
- return [
+ 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter',
+ 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr',
+ 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass',
+ 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',
+ 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',
+ 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',
+ 'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
+ 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
+ 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
+ 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+ 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+ 'NotImplementedError', 'OSError', 'OverflowError',
+ 'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
+ 'RuntimeError', 'RuntimeWarning', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
+ 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
+ 'Warning', 'WindowsError', 'ZeroDivisionError',
+ # new builtin exceptions from PEP 3151
+ 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
+ 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
+ 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
+ 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
+ 'PermissionError', 'ProcessLookupError', 'TimeoutError',
+ # others new in Python 3
+ 'StopAsyncIteration', 'ModuleNotFoundError', 'RecursionError'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'magicfuncs': [
+ (words((
+ '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
+ '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
+ '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
+ '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
+ '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
+ '__ge__', '__get__', '__getattr__', '__getattribute__',
+ '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
+ '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
+ '__imul__', '__index__', '__init__', '__instancecheck__',
+ '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
+ '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
+ '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
+ '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
+ '__new__', '__next__', '__or__', '__pos__', '__pow__',
+ '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
+ '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
+ '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
+ '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
+ '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
+ '__sub__', '__subclasscheck__', '__truediv__',
+ '__xor__'), suffix=r'\b'),
+ Name.Function.Magic),
+ ],
+ 'magicvars': [
+ (words((
+ '__annotations__', '__bases__', '__class__', '__closure__',
+ '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
+ '__func__', '__globals__', '__kwdefaults__', '__module__',
+ '__mro__', '__name__', '__objclass__', '__qualname__',
+ '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
+ Name.Variable.Magic),
+ ],
+ 'numbers': [
+ (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
+ r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
+ (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
+ (r'0[oO](?:_?[0-7])+', Number.Oct),
+ (r'0[bB](?:_?[01])+', Number.Bin),
+ (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
+ (r'\d(?:_?\d)*', Number.Integer),
+ ],
+ 'name': [
+ (r'@' + uni_name, Name.Decorator),
+ (r'@', Operator), # new matrix multiplication operator
+ (uni_name, Name),
+ ],
+ 'funcname': [
+ include('magicfuncs'),
+ (uni_name, Name.Function, '#pop'),
+ default('#pop'),
+ ],
+ 'classname': [
+ (uni_name, Name.Class, '#pop'),
+ ],
+ 'import': [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\.', Name.Namespace),
+ (uni_name, Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
+ (r'\.', Name.Namespace),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r'None\b', Name.Builtin.Pseudo, '#pop'),
+ (uni_name, Name.Namespace),
+ default('#pop'),
+ ],
+ 'rfstringescape': [
+ (r'\{\{', String.Escape),
+ (r'\}\}', String.Escape),
+ ],
+ 'fstringescape': [
+ include('rfstringescape'),
+ include('stringescape'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'fstrings-single': fstring_rules(String.Single),
+ 'fstrings-double': fstring_rules(String.Double),
+ 'strings-single': innerstring_rules(String.Single),
+ 'strings-double': innerstring_rules(String.Double),
+ 'dqf': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('fstrings-double')
+ ],
+ 'sqf': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('fstrings-single')
+ ],
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings-double')
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings-single')
+ ],
+ 'tdqf': [
+ (r'"""', String.Double, '#pop'),
+ include('fstrings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqf': [
+ (r"'''", String.Single, '#pop'),
+ include('fstrings-single'),
+ (r'\n', String.Single)
+ ],
+ 'tdqs': [
+ (r'"""', String.Double, '#pop'),
+ include('strings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqs': [
+ (r"'''", String.Single, '#pop'),
+ include('strings-single'),
+ (r'\n', String.Single)
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'pythonw?(3(\.\d)?)?') or \
+ 'import ' in text[:1000]
+
+
+Python3Lexer = PythonLexer
+
+
+class Python2Lexer(RegexLexer):
+ """
+ For `Python 2.x <http://www.python.org>`_ source code.
+
+ .. versionchanged:: 2.5
+ This class has been renamed from ``PythonLexer``. ``PythonLexer`` now
+ refers to the Python 3 variant. File name patterns like ``*.py`` have
+ been moved to Python 3 as well.
+ """
+
+ name = 'Python 2.x'
+ aliases = ['python2', 'py2']
+ filenames = [] # now taken over by PythonLexer (3.x)
+ mimetypes = ['text/x-python2', 'application/x-python2']
+
+ def innerstring_rules(ttype):
+ return [
# the old style '%s' % (...) string formatting
(r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
@@ -505,15 +505,15 @@ class Python2Lexer(RegexLexer):
'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'NameError',
- 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
- 'PendingDeprecationWarning', 'ReferenceError',
- 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
- 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
- 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
- 'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ 'MemoryError', 'NameError',
+ 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
+ 'PendingDeprecationWarning', 'ReferenceError',
+ 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
+ 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
+ 'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
Name.Exception),
],
'magicfuncs': [
@@ -619,7 +619,7 @@ class Python2Lexer(RegexLexer):
}
def analyse_text(text):
- return shebang_matches(text, r'pythonw?2(\.\d)?')
+ return shebang_matches(text, r'pythonw?2(\.\d)?')
class PythonConsoleLexer(Lexer):
@@ -639,27 +639,27 @@ class PythonConsoleLexer(Lexer):
Additional options:
`python3`
- Use Python 3 lexer for code. Default is ``True``.
+ Use Python 3 lexer for code. Default is ``True``.
.. versionadded:: 1.0
- .. versionchanged:: 2.5
- Now defaults to ``True``.
+ .. versionchanged:: 2.5
+ Now defaults to ``True``.
"""
name = 'Python console session'
aliases = ['pycon']
mimetypes = ['text/x-python-doctest']
def __init__(self, **options):
- self.python3 = get_bool_opt(options, 'python3', True)
+ self.python3 = get_bool_opt(options, 'python3', True)
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
if self.python3:
pylexer = PythonLexer(**self.options)
tblexer = PythonTracebackLexer(**self.options)
- else:
- pylexer = Python2Lexer(**self.options)
- tblexer = Python2TracebackLexer(**self.options)
+ else:
+ pylexer = Python2Lexer(**self.options)
+ tblexer = Python2TracebackLexer(**self.options)
curcode = ''
insertions = []
@@ -668,26 +668,26 @@ class PythonConsoleLexer(Lexer):
tb = 0
for match in line_re.finditer(text):
line = match.group()
- if line.startswith('>>> ') or line.startswith('... '):
+ if line.startswith('>>> ') or line.startswith('... '):
tb = 0
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:4])]))
curcode += line[4:]
- elif line.rstrip() == '...' and not tb:
+ elif line.rstrip() == '...' and not tb:
# only a new >>> prompt can end an exception block
# otherwise an ellipsis in place of the traceback frames
# will be mishandled
insertions.append((len(curcode),
- [(0, Generic.Prompt, '...')]))
+ [(0, Generic.Prompt, '...')]))
curcode += line[3:]
else:
if curcode:
- yield from do_insertions(
- insertions, pylexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, pylexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
- if (line.startswith('Traceback (most recent call last):') or
- re.match(' File "[^"]+", line \\d+\\n$', line)):
+ if (line.startswith('Traceback (most recent call last):') or
+ re.match(' File "[^"]+", line \\d+\\n$', line)):
tb = 1
curtb = line
tbindex = match.start()
@@ -695,7 +695,7 @@ class PythonConsoleLexer(Lexer):
yield match.start(), Name.Class, line
elif tb:
curtb += line
- if not (line.startswith(' ') or line.strip() == '...'):
+ if not (line.startswith(' ') or line.strip() == '...'):
tb = 0
for i, t, v in tblexer.get_tokens_unprocessed(curtb):
yield tbindex+i, t, v
@@ -703,8 +703,8 @@ class PythonConsoleLexer(Lexer):
else:
yield match.start(), Generic.Output, line
if curcode:
- yield from do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode))
if curtb:
for i, t, v in tblexer.get_tokens_unprocessed(curtb):
yield tbindex+i, t, v
@@ -712,28 +712,28 @@ class PythonConsoleLexer(Lexer):
class PythonTracebackLexer(RegexLexer):
"""
- For Python 3.x tracebacks, with support for chained exceptions.
-
- .. versionadded:: 1.0
+ For Python 3.x tracebacks, with support for chained exceptions.
- .. versionchanged:: 2.5
- This is now the default ``PythonTracebackLexer``. It is still available
- as the alias ``Python3TracebackLexer``.
+ .. versionadded:: 1.0
+
+ .. versionchanged:: 2.5
+ This is now the default ``PythonTracebackLexer``. It is still available
+ as the alias ``Python3TracebackLexer``.
"""
name = 'Python Traceback'
- aliases = ['pytb', 'py3tb']
- filenames = ['*.pytb', '*.py3tb']
- mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
+ aliases = ['pytb', 'py3tb']
+ filenames = ['*.pytb', '*.py3tb']
+ mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
tokens = {
'root': [
- (r'\n', Text),
- (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
- (r'^During handling of the above exception, another '
- r'exception occurred:\n\n', Generic.Traceback),
- (r'^The above exception was the direct cause of the '
- r'following exception:\n\n', Generic.Traceback),
+ (r'\n', Text),
+ (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+ (r'^During handling of the above exception, another '
+ r'exception occurred:\n\n', Generic.Traceback),
+ (r'^The above exception was the direct cause of the '
+ r'following exception:\n\n', Generic.Traceback),
(r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
(r'^.*\n', Other),
],
@@ -743,54 +743,54 @@ class PythonTracebackLexer(RegexLexer):
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
bygroups(Text, Name.Builtin, Text, Number, Text)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(PythonLexer), Text), 'markers'),
+ bygroups(Text, using(PythonLexer), Text), 'markers'),
(r'^([ \t]*)(\.\.\.)(\n)',
bygroups(Text, Comment, Text)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_][\w.]*)(:?\n)',
+ (r'^([a-zA-Z_][\w.]*)(:?\n)',
bygroups(Generic.Error, Text), '#pop')
],
- 'markers': [
- # Either `PEP 657 <https://www.python.org/dev/peps/pep-0657/>`
- # error locations in Python 3.11+, or single-caret markers
- # for syntax errors before that.
+ 'markers': [
+ # Either `PEP 657 <https://www.python.org/dev/peps/pep-0657/>`
+ # error locations in Python 3.11+, or single-caret markers
+ # for syntax errors before that.
(r'^( {4,})([~^]+)(\n)',
- bygroups(Text, Punctuation.Marker, Text),
- '#pop'),
- default('#pop'),
- ],
+ bygroups(Text, Punctuation.Marker, Text),
+ '#pop'),
+ default('#pop'),
+ ],
}
-Python3TracebackLexer = PythonTracebackLexer
-
-
-class Python2TracebackLexer(RegexLexer):
+Python3TracebackLexer = PythonTracebackLexer
+
+
+class Python2TracebackLexer(RegexLexer):
"""
- For Python tracebacks.
+ For Python tracebacks.
- .. versionadded:: 0.7
-
- .. versionchanged:: 2.5
- This class has been renamed from ``PythonTracebackLexer``.
- ``PythonTracebackLexer`` now refers to the Python 3 variant.
+ .. versionadded:: 0.7
+
+ .. versionchanged:: 2.5
+ This class has been renamed from ``PythonTracebackLexer``.
+ ``PythonTracebackLexer`` now refers to the Python 3 variant.
"""
- name = 'Python 2.x Traceback'
- aliases = ['py2tb']
- filenames = ['*.py2tb']
- mimetypes = ['text/x-python2-traceback']
+ name = 'Python 2.x Traceback'
+ aliases = ['py2tb']
+ filenames = ['*.py2tb']
+ mimetypes = ['text/x-python2-traceback']
tokens = {
'root': [
- # Cover both (most recent call last) and (innermost last)
- # The optional ^C allows us to catch keyboard interrupt signals.
- (r'^(\^C)?(Traceback.*\n)',
- bygroups(Text, Generic.Traceback), 'intb'),
- # SyntaxError starts with this.
+ # Cover both (most recent call last) and (innermost last)
+ # The optional ^C allows us to catch keyboard interrupt signals.
+ (r'^(\^C)?(Traceback.*\n)',
+ bygroups(Text, Generic.Traceback), 'intb'),
+ # SyntaxError starts with this.
(r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
+ (r'^.*\n', Other),
],
'intb': [
(r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
@@ -798,7 +798,7 @@ class Python2TracebackLexer(RegexLexer):
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
bygroups(Text, Name.Builtin, Text, Number, Text)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(Python2Lexer), Text), 'marker'),
+ bygroups(Text, using(Python2Lexer), Text), 'marker'),
(r'^([ \t]*)(\.\.\.)(\n)',
bygroups(Text, Comment, Text)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
@@ -806,11 +806,11 @@ class Python2TracebackLexer(RegexLexer):
(r'^([a-zA-Z_]\w*)(:?\n)',
bygroups(Generic.Error, Text), '#pop')
],
- 'marker': [
- # For syntax errors.
- (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
- default('#pop'),
- ],
+ 'marker': [
+ # For syntax errors.
+ (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
+ default('#pop'),
+ ],
}
@@ -866,7 +866,7 @@ class CythonLexer(RegexLexer):
],
'keywords': [
(words((
- 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+ 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
@@ -875,14 +875,14 @@ class CythonLexer(RegexLexer):
],
'builtins': [
(words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
- 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
@@ -1106,7 +1106,7 @@ class NumPyLexer(PythonLexer):
mimetypes = []
filenames = []
- EXTRA_KEYWORDS = {
+ EXTRA_KEYWORDS = {
'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
@@ -1171,7 +1171,7 @@ class NumPyLexer(PythonLexer):
'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
- }
+ }
def get_tokens_unprocessed(self, text):
for index, token, value in \
@@ -1182,7 +1182,7 @@ class NumPyLexer(PythonLexer):
yield index, token, value
def analyse_text(text):
- ltext = text[:1000]
- return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
- 'import ' in ltext) \
- and ('import numpy' in ltext or 'from numpy import' in ltext)
+ ltext = text[:1000]
+ return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
+ 'import ' in ltext) \
+ and ('import numpy' in ltext or 'from numpy import' in ltext)
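The analyse_text hooks restored in this file are what lexer guessing relies on: NumPyLexer only claims text that both looks like Python and mentions numpy in its first kilobyte, and Python2Lexer only claims an explicit python2 shebang. A minimal sketch of that behaviour, using made-up snippets and calling the hooks directly so the result is deterministic:

from pygments.lexers.python import NumPyLexer, Python2Lexer, PythonLexer

plain = "import os\nprint(os.getcwd())\n"
numpy_code = "import numpy as np\nprint(np.zeros(3))\n"
py2_script = "#!/usr/bin/env python2\nprint 'hello'\n"

print(PythonLexer.analyse_text(plain))       # truthy: 'import ' in the first 1000 chars
print(NumPyLexer.analyse_text(plain))        # falsy: no numpy import
print(NumPyLexer.analyse_text(numpy_code))   # truthy: Python-looking *and* imports numpy
print(Python2Lexer.analyse_text(py2_script)) # truthy: python2 shebang
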
diff --git a/contrib/python/Pygments/py3/pygments/lexers/qvt.py b/contrib/python/Pygments/py3/pygments/lexers/qvt.py
index 72817f09c1..4e12022853 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/qvt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/qvt.py
@@ -4,7 +4,7 @@
Lexer for QVT Operational language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,7 +17,7 @@ __all__ = ['QVToLexer']
class QVToLexer(RegexLexer):
- """
+ """
For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
Reference for implementing this: «Meta Object Facility (MOF) 2.0
diff --git a/contrib/python/Pygments/py3/pygments/lexers/r.py b/contrib/python/Pygments/py3/pygments/lexers/r.py
index 44168a7ad5..4d4fd77ef1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/r.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/r.py
@@ -4,7 +4,7 @@
Lexers for the R/S languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -48,8 +48,8 @@ class RConsoleLexer(Lexer):
# If we have stored prompt lines, need to process them first.
if current_code_block:
# Weave together the prompts and highlight code.
- yield from do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block))
+ yield from do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block))
# Reset vars for next code block.
current_code_block = ''
insertions = []
@@ -60,8 +60,8 @@ class RConsoleLexer(Lexer):
# process the last code block. This is neither elegant nor DRY so
# should be changed.
if current_code_block:
- yield from do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block))
+ yield from do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block))
class SLexer(RegexLexer):
@@ -77,7 +77,7 @@ class SLexer(RegexLexer):
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
- valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
+ valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
tokens = {
'comments': [
(r'#.*$', Comment.Single),
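The RConsoleLexer hunks above weave prompt tokens back into the code stream with do_insertions. A self-contained sketch of that same call pattern, using SLexer directly and two hand-built prompt insertions (indices 0 and 10 are the offsets of the two lines in the sample code):

from pygments.lexer import do_insertions
from pygments.lexers.r import SLexer
from pygments.token import Generic

code = "x <- 1:10\nmean(x)\n"
insertions = [
    (0,  [(0, Generic.Prompt, "> ")]),   # prompt before the first line
    (10, [(0, Generic.Prompt, "> ")]),   # prompt before the second line
]

# Interleave the prompt tokens with the R tokens, as RConsoleLexer does above.
for _, token, value in do_insertions(insertions, SLexer().get_tokens_unprocessed(code)):
    print(token, repr(value))
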
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rdf.py b/contrib/python/Pygments/py3/pygments/lexers/rdf.py
index bd7a4f690c..a6d7a372d8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rdf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rdf.py
@@ -4,7 +4,7 @@
Lexers for semantic web and RDF query languages and markup.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,12 +14,12 @@ from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
Whitespace, Name, Literal, Comment, Text
-__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
+__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
class SparqlLexer(RegexLexer):
"""
- Lexer for `SPARQL <https://www.w3.org/TR/sparql11-query/>`_ query language.
+ Lexer for `SPARQL <https://www.w3.org/TR/sparql11-query/>`_ query language.
.. versionadded:: 2.0
"""
@@ -30,27 +30,27 @@ class SparqlLexer(RegexLexer):
# character group definitions ::
- PN_CHARS_BASE_GRP = ('a-zA-Z'
- '\u00c0-\u00d6'
- '\u00d8-\u00f6'
- '\u00f8-\u02ff'
- '\u0370-\u037d'
- '\u037f-\u1fff'
- '\u200c-\u200d'
- '\u2070-\u218f'
- '\u2c00-\u2fef'
- '\u3001-\ud7ff'
- '\uf900-\ufdcf'
- '\ufdf0-\ufffd')
+ PN_CHARS_BASE_GRP = ('a-zA-Z'
+ '\u00c0-\u00d6'
+ '\u00d8-\u00f6'
+ '\u00f8-\u02ff'
+ '\u0370-\u037d'
+ '\u037f-\u1fff'
+ '\u200c-\u200d'
+ '\u2070-\u218f'
+ '\u2c00-\u2fef'
+ '\u3001-\ud7ff'
+ '\uf900-\ufdcf'
+ '\ufdf0-\ufffd')
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
PN_CHARS_GRP = (PN_CHARS_U_GRP +
r'\-' +
r'0-9' +
- '\u00b7' +
- '\u0300-\u036f' +
- '\u203f-\u2040')
+ '\u00b7' +
+ '\u0300-\u036f' +
+ '\u203f-\u2040')
HEX_GRP = '0-9A-Fa-f'
@@ -75,8 +75,8 @@ class SparqlLexer(RegexLexer):
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
- VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
- '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
+ VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
+ '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
PERCENT = '%' + HEX + HEX
@@ -98,10 +98,10 @@ class SparqlLexer(RegexLexer):
# keywords ::
(r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
- r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
- r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
+ r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
+ r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
r'using\s+named|using|graph|default|named|all|optional|service|'
- r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
+ r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
(r'(a)\b', Keyword),
# IRIs ::
('(' + IRIREF + ')', Name.Label),
@@ -116,7 +116,7 @@ class SparqlLexer(RegexLexer):
(r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
- r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
+ r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
r'count|sum|min|max|avg|sample|group_concat|separator)\b',
@@ -186,61 +186,61 @@ class TurtleLexer(RegexLexer):
filenames = ['*.ttl']
mimetypes = ['text/turtle', 'application/x-turtle']
- # character group definitions ::
- PN_CHARS_BASE_GRP = ('a-zA-Z'
- '\u00c0-\u00d6'
- '\u00d8-\u00f6'
- '\u00f8-\u02ff'
- '\u0370-\u037d'
- '\u037f-\u1fff'
- '\u200c-\u200d'
- '\u2070-\u218f'
- '\u2c00-\u2fef'
- '\u3001-\ud7ff'
- '\uf900-\ufdcf'
- '\ufdf0-\ufffd')
-
- PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
- PN_CHARS_GRP = (PN_CHARS_U_GRP +
- r'\-' +
- r'0-9' +
- '\u00b7' +
- '\u0300-\u036f' +
- '\u203f-\u2040')
-
- PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
- PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
- PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
- HEX_GRP = '0-9A-Fa-f'
-
- HEX = '[' + HEX_GRP + ']'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
-
- PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
- PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
- '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
- PN_CHARS_GRP + ':]|' + PLX + '))?')
-
+ # character group definitions ::
+ PN_CHARS_BASE_GRP = ('a-zA-Z'
+ '\u00c0-\u00d6'
+ '\u00d8-\u00f6'
+ '\u00f8-\u02ff'
+ '\u0370-\u037d'
+ '\u037f-\u1fff'
+ '\u200c-\u200d'
+ '\u2070-\u218f'
+ '\u2c00-\u2fef'
+ '\u3001-\ud7ff'
+ '\uf900-\ufdcf'
+ '\ufdf0-\ufffd')
+
+ PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+ PN_CHARS_GRP = (PN_CHARS_U_GRP +
+ r'\-' +
+ r'0-9' +
+ '\u00b7' +
+ '\u0300-\u036f' +
+ '\u203f-\u2040')
+
+ PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+ PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+ PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+ HEX_GRP = '0-9A-Fa-f'
+
+ HEX = '[' + HEX_GRP + ']'
+
+ PERCENT = '%' + HEX + HEX
+
+ PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
+
+ PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+ PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+ PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+ PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+ '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+ PN_CHARS_GRP + ':]|' + PLX + '))?')
+
patterns = {
- 'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range
+ 'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range
'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
}
tokens = {
'root': [
- (r'\s+', Text),
+ (r'\s+', Text),
# Base / prefix
(r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
@@ -257,8 +257,8 @@ class TurtleLexer(RegexLexer):
(r'%(IRIREF)s' % patterns, Name.Variable),
# PrefixedName
- (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
+ (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
+ bygroups(Name.Namespace, Punctuation, Name.Tag)),
# Comment
(r'#[^\n]+', Comment),
@@ -298,7 +298,7 @@ class TurtleLexer(RegexLexer):
(r'.', String, '#pop'),
],
'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+ (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
bygroups(Operator, Generic.Emph), '#pop:2'),
(r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
@@ -314,149 +314,149 @@ class TurtleLexer(RegexLexer):
for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
if re.search(r'^\s*%s' % t, text):
return 0.80
-
-
-class ShExCLexer(RegexLexer):
- """
- Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
- """
- name = 'ShExC'
- aliases = ['shexc', 'shex']
- filenames = ['*.shex']
- mimetypes = ['text/shex']
-
- # character group definitions ::
-
- PN_CHARS_BASE_GRP = ('a-zA-Z'
- '\u00c0-\u00d6'
- '\u00d8-\u00f6'
- '\u00f8-\u02ff'
- '\u0370-\u037d'
- '\u037f-\u1fff'
- '\u200c-\u200d'
- '\u2070-\u218f'
- '\u2c00-\u2fef'
- '\u3001-\ud7ff'
- '\uf900-\ufdcf'
- '\ufdf0-\ufffd')
-
- PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
- PN_CHARS_GRP = (PN_CHARS_U_GRP +
- r'\-' +
- r'0-9' +
- '\u00b7' +
- '\u0300-\u036f' +
- '\u203f-\u2040')
-
- HEX_GRP = '0-9A-Fa-f'
-
- PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
-
- # terminal productions ::
-
- PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
- PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
- PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
- HEX = '[' + HEX_GRP + ']'
-
- PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
-
- UCHAR = r'\\' + UCHAR_NO_BACKSLASH
-
- IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
-
- BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
- '.]*' + PN_CHARS + ')?'
-
- PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
- PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
- '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
- PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- EXPONENT = r'[eE][+-]?\d+'
-
- # Lexer token definitions ::
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # keywords ::
- (r'(?i)(base|prefix|start|external|'
- r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
- r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
- r'totaldigits|fractiondigits|'
- r'closed|extra)\b', Keyword),
- (r'(a)\b', Keyword),
- # IRIs ::
- ('(' + IRIREF + ')', Name.Label),
- # blank nodes ::
- ('(' + BLANK_NODE_LABEL + ')', Name.Label),
- # prefixed names ::
- (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
- # boolean literals ::
- (r'(true|false)', Keyword.Constant),
- # double literals ::
- (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
- # decimal literals ::
- (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
- # integer literals ::
- (r'[+\-]?\d+', Number.Integer),
- # operators ::
- (r'[@|$&=*+?^\-~]', Operator),
- # operator keywords ::
- (r'(?i)(and|or|not)\b', Operator.Word),
- # punctuation characters ::
- (r'[(){}.;,:^\[\]]', Punctuation),
- # line comments ::
- (r'#[^\n]*', Comment),
- # strings ::
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String.Escape, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
- (r'.', String.Escape, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Name.Function), '#pop:2'),
- (r'\^\^', Operator, '#pop:2'),
- default('#pop:2'),
- ],
- }
+
+
+class ShExCLexer(RegexLexer):
+ """
+ Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
+ """
+ name = 'ShExC'
+ aliases = ['shexc', 'shex']
+ filenames = ['*.shex']
+ mimetypes = ['text/shex']
+
+ # character group definitions ::
+
+ PN_CHARS_BASE_GRP = ('a-zA-Z'
+ '\u00c0-\u00d6'
+ '\u00d8-\u00f6'
+ '\u00f8-\u02ff'
+ '\u0370-\u037d'
+ '\u037f-\u1fff'
+ '\u200c-\u200d'
+ '\u2070-\u218f'
+ '\u2c00-\u2fef'
+ '\u3001-\ud7ff'
+ '\uf900-\ufdcf'
+ '\ufdf0-\ufffd')
+
+ PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+ PN_CHARS_GRP = (PN_CHARS_U_GRP +
+ r'\-' +
+ r'0-9' +
+ '\u00b7' +
+ '\u0300-\u036f' +
+ '\u203f-\u2040')
+
+ HEX_GRP = '0-9A-Fa-f'
+
+ PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
+
+ # terminal productions ::
+
+ PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+ PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
+
+ PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+ HEX = '[' + HEX_GRP + ']'
+
+ PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+ UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
+
+ UCHAR = r'\\' + UCHAR_NO_BACKSLASH
+
+ IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
+
+ BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
+ '.]*' + PN_CHARS + ')?'
+
+ PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+ PERCENT = '%' + HEX + HEX
+
+ PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+ PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+ PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+ '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+ PN_CHARS_GRP + ':]|' + PLX + '))?')
+
+ EXPONENT = r'[eE][+-]?\d+'
+
+ # Lexer token definitions ::
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # keywords ::
+ (r'(?i)(base|prefix|start|external|'
+ r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
+ r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
+ r'totaldigits|fractiondigits|'
+ r'closed|extra)\b', Keyword),
+ (r'(a)\b', Keyword),
+ # IRIs ::
+ ('(' + IRIREF + ')', Name.Label),
+ # blank nodes ::
+ ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+ # prefixed names ::
+ (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
+ bygroups(Name.Namespace, Punctuation, Name.Tag)),
+ # boolean literals ::
+ (r'(true|false)', Keyword.Constant),
+ # double literals ::
+ (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
+ # decimal literals ::
+ (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+ # integer literals ::
+ (r'[+\-]?\d+', Number.Integer),
+ # operators ::
+ (r'[@|$&=*+?^\-~]', Operator),
+ # operator keywords ::
+ (r'(?i)(and|or|not)\b', Operator.Word),
+ # punctuation characters ::
+ (r'[(){}.;,:^\[\]]', Punctuation),
+ # line comments ::
+ (r'#[^\n]*', Comment),
+ # strings ::
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String.Escape, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'end-of-string': [
+ (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+ bygroups(Operator, Name.Function), '#pop:2'),
+ (r'\^\^', Operator, '#pop:2'),
+ default('#pop:2'),
+ ],
+ }
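The Turtle hunks above show both the analyse_text heuristic (0.80 as soon as a line starts with @base/@prefix) and the 'end-of-string' state for language tags. A minimal sketch that touches both, importing TurtleLexer straight from the module this diff edits:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.rdf import TurtleLexer

doc = (
    '@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n'
    '<#me> a foaf:Person ;\n'
    '    foaf:name "Alice"@en .\n'   # the @en tag exercises 'end-of-string'
)

print(TurtleLexer.analyse_text(doc))              # 0.80: the @prefix line matches
print(highlight(doc, TurtleLexer(), TerminalFormatter()))
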
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rebol.py b/contrib/python/Pygments/py3/pygments/lexers/rebol.py
index 57480a1cb9..7bdfb6f26c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rebol.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rebol.py
@@ -4,7 +4,7 @@
Lexers for the REBOL and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/resource.py b/contrib/python/Pygments/py3/pygments/lexers/resource.py
index 3ed176a181..f6506cd12c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/resource.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/resource.py
@@ -4,7 +4,7 @@
Lexer for resource definition files.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,8 +24,8 @@ class ResourceLexer(RegexLexer):
.. versionadded:: 2.0
"""
name = 'ResourceBundle'
- aliases = ['resourcebundle', 'resource']
- filenames = []
+ aliases = ['resourcebundle', 'resource']
+ filenames = []
_types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
':int', ':alias')
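With filenames left empty, the ResourceBundle lexer is never selected from a file name and has to be requested by one of the aliases shown above. A tiny sketch, assuming the alias is registered in the lexer mapping as usual:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name("resourcebundle")   # 'resource' should work as well
print(lexer.name)                             # 'ResourceBundle'
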
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ride.py b/contrib/python/Pygments/py3/pygments/lexers/ride.py
index 07cc1ef6b7..2892ec0264 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ride.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ride.py
@@ -1,138 +1,138 @@
-"""
- pygments.lexers.ride
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Ride programming language.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text
-
-__all__ = ['RideLexer']
-
-
-class RideLexer(RegexLexer):
- """
- For `Ride <https://docs.wavesplatform.com/en/ride/about-ride.html>`_
- source code.
-
- .. versionadded:: 2.6
- """
-
- name = 'Ride'
- aliases = ['ride']
- filenames = ['*.ride']
- mimetypes = ['text/x-ride']
-
- validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
-
- builtinOps = (
- '||', '|', '>=', '>', '==', '!',
- '=', '<=', '<', '::', ':+', ':', '!=', '/',
- '.', '=>', '-', '+', '*', '&&', '%', '++',
- )
-
- globalVariablesName = (
- 'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512',
- 'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit',
- 'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN',
- 'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP',
- )
-
- typesName = (
- 'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias',
- 'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction',
- 'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction',
- 'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction',
- 'TransferTransaction', 'SetAssetScriptTransaction',
- 'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction',
- 'LeaseCancelTransaction', 'CreateAliasTransaction',
- 'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction',
- 'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet',
- 'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue',
- 'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512',
- 'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry',
- 'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling',
- 'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up',
- )
-
- functionsName = (
- 'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight',
- 'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value',
- 'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split',
- 'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256',
- 'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String',
- 'fromBase64String', 'transactionById', 'transactionHeightById',
- 'getInteger', 'getBoolean', 'getBinary', 'getString',
- 'addressFromPublicKey', 'addressFromString', 'addressFromRecipient',
- 'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue',
- 'getBinaryValue', 'getStringValue', 'addressFromStringValue',
- 'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median',
- 'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow',
- 'toBase16String', 'fromBase16String', 'blockInfoByHeight',
- 'transferTransactionById',
- )
-
- reservedWords = words((
- 'match', 'case', 'else', 'func', 'if',
- 'let', 'then', '@Callable', '@Verifier',
- ), suffix=r'\b')
-
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Whitespace
- (r'\s+', Text),
- # Strings
- (r'"', String, 'doublequote'),
- (r'utf8\'', String, 'utf8quote'),
- (r'base(58|64|16)\'', String, 'singlequote'),
- # Keywords
- (reservedWords, Keyword.Reserved),
- (r'\{-#.*?#-\}', Keyword.Reserved),
- (r'FOLD<\d+>', Keyword.Reserved),
- # Types
- (words(typesName), Keyword.Type),
- # Main
- # (specialName, Keyword.Reserved),
- # Prefix Operators
- (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function),
- # Infix Operators
- (words(builtinOps), Name.Function),
- (words(globalVariablesName), Name.Function),
- (words(functionsName), Name.Function),
- # Numbers
- include('numbers'),
- # Variable Names
- (validName, Name.Variable),
- # Parens
- (r'[,()\[\]{}]', Punctuation),
- ],
-
- 'doublequote': [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\[nrfvb\\"]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- 'utf8quote': [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\[nrfvb\\\']', String.Escape),
- (r'[^\']', String),
- (r'\'', String, '#pop'),
- ],
-
- 'singlequote': [
- (r'[^\']', String),
- (r'\'', String, '#pop'),
- ],
-
- 'numbers': [
- (r'_?\d+', Number.Integer),
- ],
- }
+"""
+ pygments.lexers.ride
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Ride programming language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text
+
+__all__ = ['RideLexer']
+
+
+class RideLexer(RegexLexer):
+ """
+ For `Ride <https://docs.wavesplatform.com/en/ride/about-ride.html>`_
+ source code.
+
+ .. versionadded:: 2.6
+ """
+
+ name = 'Ride'
+ aliases = ['ride']
+ filenames = ['*.ride']
+ mimetypes = ['text/x-ride']
+
+ validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
+
+ builtinOps = (
+ '||', '|', '>=', '>', '==', '!',
+ '=', '<=', '<', '::', ':+', ':', '!=', '/',
+ '.', '=>', '-', '+', '*', '&&', '%', '++',
+ )
+
+ globalVariablesName = (
+ 'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512',
+ 'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit',
+ 'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN',
+ 'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP',
+ )
+
+ typesName = (
+ 'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias',
+ 'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction',
+ 'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction',
+ 'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction',
+ 'TransferTransaction', 'SetAssetScriptTransaction',
+ 'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction',
+ 'LeaseCancelTransaction', 'CreateAliasTransaction',
+ 'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction',
+ 'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet',
+ 'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue',
+ 'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512',
+ 'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry',
+ 'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling',
+ 'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up',
+ )
+
+ functionsName = (
+ 'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight',
+ 'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value',
+ 'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split',
+ 'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256',
+ 'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String',
+ 'fromBase64String', 'transactionById', 'transactionHeightById',
+ 'getInteger', 'getBoolean', 'getBinary', 'getString',
+ 'addressFromPublicKey', 'addressFromString', 'addressFromRecipient',
+ 'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue',
+ 'getBinaryValue', 'getStringValue', 'addressFromStringValue',
+ 'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median',
+ 'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow',
+ 'toBase16String', 'fromBase16String', 'blockInfoByHeight',
+ 'transferTransactionById',
+ )
+
+ reservedWords = words((
+ 'match', 'case', 'else', 'func', 'if',
+ 'let', 'then', '@Callable', '@Verifier',
+ ), suffix=r'\b')
+
+ tokens = {
+ 'root': [
+ # Comments
+ (r'#.*', Comment.Single),
+ # Whitespace
+ (r'\s+', Text),
+ # Strings
+ (r'"', String, 'doublequote'),
+ (r'utf8\'', String, 'utf8quote'),
+ (r'base(58|64|16)\'', String, 'singlequote'),
+ # Keywords
+ (reservedWords, Keyword.Reserved),
+ (r'\{-#.*?#-\}', Keyword.Reserved),
+ (r'FOLD<\d+>', Keyword.Reserved),
+ # Types
+ (words(typesName), Keyword.Type),
+ # Main
+ # (specialName, Keyword.Reserved),
+ # Prefix Operators
+ (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function),
+ # Infix Operators
+ (words(builtinOps), Name.Function),
+ (words(globalVariablesName), Name.Function),
+ (words(functionsName), Name.Function),
+ # Numbers
+ include('numbers'),
+ # Variable Names
+ (validName, Name.Variable),
+ # Parens
+ (r'[,()\[\]{}]', Punctuation),
+ ],
+
+ 'doublequote': [
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\[nrfvb\\"]', String.Escape),
+ (r'[^"]', String),
+ (r'"', String, '#pop'),
+ ],
+
+ 'utf8quote': [
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\[nrfvb\\\']', String.Escape),
+ (r'[^\']', String),
+ (r'\'', String, '#pop'),
+ ],
+
+ 'singlequote': [
+ (r'[^\']', String),
+ (r'\'', String, '#pop'),
+ ],
+
+ 'numbers': [
+ (r'_?\d+', Number.Integer),
+ ],
+ }
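Editorial note: since the Ride lexer is reproduced in full above, a short sketch of how it is normally driven may help readers who only skim the diff. It assumes the vendored Pygments (2.6+, where RideLexer first appeared) is importable; the Ride snippet itself is invented:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import RideLexer

# Invented Ride fragment, just enough to exercise the directive, keyword,
# number and string rules defined in the token table above.
source = '{-# STDLIB_VERSION 5 #-}\nlet answer = 42\nlet greeting = "hi"\n'

# highlight() feeds the token stream from RideLexer into the formatter
# (ANSI colours here); swap in HtmlFormatter() for HTML output.
print(highlight(source, RideLexer(), TerminalFormatter()))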
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rnc.py b/contrib/python/Pygments/py3/pygments/lexers/rnc.py
index cc8950a0b7..2be2a960c3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rnc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rnc.py
@@ -4,7 +4,7 @@
Lexer for Relax-NG Compact syntax
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,7 +23,7 @@ class RNCCompactLexer(RegexLexer):
"""
name = 'Relax-NG Compact'
- aliases = ['rng-compact', 'rnc']
+ aliases = ['rng-compact', 'rnc']
filenames = ['*.rnc']
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/roboconf.py b/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
index 4380113831..764ae30b7c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
@@ -4,7 +4,7 @@
Lexers for Roboconf DSL.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/robotframework.py b/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
index 3c212f5e20..4406e0404d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
@@ -4,7 +4,7 @@
Lexer for Robot Framework.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,7 +62,7 @@ class RobotFrameworkLexer(Lexer):
"""
name = 'RobotFramework'
aliases = ['robotframework']
- filenames = ['*.robot']
+ filenames = ['*.robot']
mimetypes = ['text/x-robotframework']
def __init__(self, **options):
@@ -78,11 +78,11 @@ class RobotFrameworkLexer(Lexer):
for value, token in row_tokenizer.tokenize(row):
for value, token in var_tokenizer.tokenize(value, token):
if value:
- yield index, token, str(value)
+ yield index, token, str(value)
index += len(value)
-class VariableTokenizer:
+class VariableTokenizer:
def tokenize(self, string, token):
var = VariableSplitter(string, identifiers='$@%&')
@@ -97,16 +97,16 @@ class VariableTokenizer:
before = string[:var.start]
yield before, orig_token
yield var.identifier + '{', SYNTAX
- yield from self.tokenize(var.base, VARIABLE)
+ yield from self.tokenize(var.base, VARIABLE)
yield '}', SYNTAX
if var.index is not None:
yield '[', SYNTAX
- yield from self.tokenize(var.index, VARIABLE)
+ yield from self.tokenize(var.index, VARIABLE)
yield ']', SYNTAX
- yield from self.tokenize(string[var.end:], orig_token)
+ yield from self.tokenize(string[var.end:], orig_token)
-class RowTokenizer:
+class RowTokenizer:
def __init__(self):
self._table = UnknownTable()
@@ -119,7 +119,7 @@ class RowTokenizer:
'metadata': settings,
'variables': variables, 'variable': variables,
'testcases': testcases, 'testcase': testcases,
- 'tasks': testcases, 'task': testcases,
+ 'tasks': testcases, 'task': testcases,
'keywords': keywords, 'keyword': keywords,
'userkeywords': keywords, 'userkeyword': keywords}
@@ -134,8 +134,8 @@ class RowTokenizer:
elif index == 0 and value.startswith('*'):
self._table = self._start_table(value)
heading = True
- yield from self._tokenize(value, index, commented,
- separator, heading)
+ yield from self._tokenize(value, index, commented,
+ separator, heading)
self._table.end_row()
def _start_table(self, header):
@@ -150,22 +150,22 @@ class RowTokenizer:
elif heading:
yield value, HEADING
else:
- yield from self._table.tokenize(value, index)
+ yield from self._table.tokenize(value, index)
-class RowSplitter:
+class RowSplitter:
_space_splitter = re.compile('( {2,})')
_pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
def split(self, row):
splitter = (row.startswith('| ') and self._split_from_pipes
or self._split_from_spaces)
- yield from splitter(row)
+ yield from splitter(row)
yield '\n'
def _split_from_spaces(self, row):
yield '' # Start with (pseudo)separator similarly as with pipes
- yield from self._space_splitter.split(row)
+ yield from self._space_splitter.split(row)
def _split_from_pipes(self, row):
_, separator, rest = self._pipe_splitter.split(row, 1)
@@ -177,7 +177,7 @@ class RowSplitter:
yield rest
-class Tokenizer:
+class Tokenizer:
_tokens = None
def __init__(self):
@@ -208,11 +208,11 @@ class Comment(Tokenizer):
class Setting(Tokenizer):
_tokens = (SETTING, ARGUMENT)
_keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
- 'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
- 'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
+ 'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
+ 'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
_import_settings = ('library', 'resource', 'variables')
_other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
- 'testtimeout','tasktimeout')
+ 'testtimeout','tasktimeout')
_custom_tokenizer = None
def __init__(self, template_setter=None):
@@ -284,7 +284,7 @@ class KeywordCall(Tokenizer):
return GherkinTokenizer().tokenize(value, KEYWORD)
-class GherkinTokenizer:
+class GherkinTokenizer:
_gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
def tokenize(self, value, token):
@@ -312,7 +312,7 @@ class ForLoop(Tokenizer):
return token
-class _Table:
+class _Table:
_tokenizer_class = None
def __init__(self, prev_tokenizer=None):
@@ -325,7 +325,7 @@ class _Table:
self._tokenizer = self._prev_tokenizer
yield value, SYNTAX
else:
- yield from self._tokenize(value, index)
+ yield from self._tokenize(value, index)
self._prev_values_on_row.append(value)
def _continues(self, value, index):
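Editorial note: most of the Robot Framework churn above sits on `yield from self.tokenize(...)` lines. For readers unfamiliar with the construct, a tiny standalone comparison (plain Python, nothing Pygments-specific) of the two equivalent spellings:

def inner():
    yield 1
    yield 2

def delegate_with_loop():
    # Python 2-era spelling: re-yield every item by hand.
    for item in inner():
        yield item

def delegate_with_yield_from():
    # Python 3.3+ spelling used throughout the tokenizers above.
    yield from inner()

assert list(delegate_with_loop()) == list(delegate_with_yield_from()) == [1, 2]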
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ruby.py b/contrib/python/Pygments/py3/pygments/lexers/ruby.py
index 2b2f923eb9..cd78686f82 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ruby.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ruby.py
@@ -4,7 +4,7 @@
Lexers for Ruby and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,7 +33,7 @@ class RubyLexer(ExtendedRegexLexer):
"""
name = 'Ruby'
- aliases = ['ruby', 'rb', 'duby']
+ aliases = ['ruby', 'rb', 'duby']
filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
'*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
mimetypes = ['text/x-ruby', 'application/x-ruby']
@@ -42,25 +42,25 @@ class RubyLexer(ExtendedRegexLexer):
def heredoc_callback(self, match, ctx):
# okay, this is the hardest part of parsing Ruby...
- # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+ # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
start = match.start(1)
- yield start, Operator, match.group(1) # <<[-~]?
+ yield start, Operator, match.group(1) # <<[-~]?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
outermost = not bool(heredocstack)
- heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
+ heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
ctx.pos = match.start(5)
ctx.end = match.end(5)
- # this may find other heredocs, so limit the recursion depth
- if len(heredocstack) < 100:
- yield from self.get_tokens_unprocessed(context=ctx)
- else:
- yield ctx.pos, String.Heredoc, match.group(5)
+ # this may find other heredocs, so limit the recursion depth
+ if len(heredocstack) < 100:
+ yield from self.get_tokens_unprocessed(context=ctx)
+ else:
+ yield ctx.pos, String.Heredoc, match.group(5)
ctx.pos = match.end()
if outermost:
@@ -109,18 +109,18 @@ class RubyLexer(ExtendedRegexLexer):
# easy ones
(r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
(words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
- (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
+ (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
(r':"', String.Symbol, 'simple-sym'),
(r'([a-zA-Z_]\w*)(:)(?!:)',
bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
- (r'"', String.Double, 'simple-string-double'),
- (r"'", String.Single, 'simple-string-single'),
+ (r'"', String.Double, 'simple-string-double'),
+ (r"'", String.Single, 'simple-string-single'),
(r'(?<!\.)`', String.Backtick, 'simple-backtick'),
]
- # quoted string and symbol
- for name, ttype, end in ('string-double', String.Double, '"'), \
- ('string-single', String.Single, "'"),\
+ # quoted string and symbol
+ for name, ttype, end in ('string-double', String.Double, '"'), \
+ ('string-single', String.Single, "'"),\
('sym', String.Symbol, '"'), \
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
@@ -249,10 +249,10 @@ class RubyLexer(ExtendedRegexLexer):
Name.Builtin),
(r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
# normal heredocs
- (r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
+ (r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
heredoc_callback),
# empty string heredocs
- (r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
+ (r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
(r'__END__', Comment.Preproc, 'end-part'),
# multiline regex (after keywords or assignments)
(r'(?:^|(?<=[=<>~!:])|'
@@ -329,13 +329,13 @@ class RubyLexer(ExtendedRegexLexer):
],
'funcname': [
(r'\(', Punctuation, 'defexpr'),
- (r'(?:([a-zA-Z_]\w*)(\.))?' # optional scope name, like "self."
- r'('
- r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?' # method name
- r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?' # or operator override
- r'|\[\]=?' # or element reference/assignment override
- r'|`' # or the undocumented backtick override
- r')',
+ (r'(?:([a-zA-Z_]\w*)(\.))?' # optional scope name, like "self."
+ r'('
+ r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?' # method name
+ r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?' # or operator override
+ r'|\[\]=?' # or element reference/assignment override
+ r'|`' # or the undocumented backtick override
+ r')',
bygroups(Name.Class, Operator, Name.Function), '#pop'),
default('#pop')
],
@@ -427,14 +427,14 @@ class RubyConsoleLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- yield from do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
- yield from do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode))
+ yield from do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode))
class FancyLexer(RegexLexer):
@@ -455,26 +455,26 @@ class FancyLexer(RegexLexer):
tokens = {
# copied from PerlLexer:
'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
+ (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
(r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
+ (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
],
'root': [
(r'\s+', Text),
# balanced delimiters (copied from PerlLexer):
- (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
- (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
+ (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
+ (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
+ (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
(r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
# Comments
@@ -482,9 +482,9 @@ class FancyLexer(RegexLexer):
# Symbols
(r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
# Multi-line DoubleQuotedString
- (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
+ (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
# DoubleQuotedString
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# keywords
(r'(def|class|try|catch|finally|retry|return|return_local|match|'
r'case|->|=>)\b', Keyword),
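Editorial note: the Ruby hunks touch heredoc_callback without changing its behaviour. To see that callback in action, a short sketch that simply dumps RubyLexer's token stream for a squiggly heredoc (the Ruby snippet is invented):

from pygments.lexers import RubyLexer

code = 'puts(<<~TEXT)\n  hello heredoc\nTEXT\n'

# get_tokens() yields (token_type, value) pairs; the heredoc body shows up
# as String.Heredoc tokens produced via the callback shown in the diff.
for token_type, value in RubyLexer().get_tokens(code):
    print(token_type, repr(value))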
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rust.py b/contrib/python/Pygments/py3/pygments/lexers/rust.py
index d01f73e4a4..db6edd3b77 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rust.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rust.py
@@ -4,7 +4,7 @@
Lexers for the Rust language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,56 +17,56 @@ __all__ = ['RustLexer']
class RustLexer(RegexLexer):
"""
- Lexer for the Rust programming language (version 1.47).
+ Lexer for the Rust programming language (version 1.47).
.. versionadded:: 1.6
"""
name = 'Rust'
filenames = ['*.rs', '*.rs.in']
aliases = ['rust', 'rs']
- mimetypes = ['text/rust', 'text/x-rust']
-
- keyword_types = (words((
- 'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
- 'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
- ), suffix=r'\b'), Keyword.Type)
-
- builtin_funcs_types = (words((
- 'Copy', 'Send', 'Sized', 'Sync', 'Unpin',
- 'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop',
- 'Box', 'ToOwned', 'Clone',
- 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
- 'AsRef', 'AsMut', 'Into', 'From', 'Default',
- 'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator',
- 'ExactSizeIterator',
- 'Option', 'Some', 'None',
- 'Result', 'Ok', 'Err',
- 'String', 'ToString', 'Vec',
- ), suffix=r'\b'), Name.Builtin)
-
- builtin_macros = (words((
- 'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column',
- 'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert',
- 'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln',
- 'file', 'format', 'format_args', 'format_args_nl', 'global_asm',
- 'include', 'include_bytes', 'include_str',
- 'is_aarch64_feature_detected',
- 'is_arm_feature_detected',
- 'is_mips64_feature_detected',
- 'is_mips_feature_detected',
- 'is_powerpc64_feature_detected',
- 'is_powerpc_feature_detected',
- 'is_x86_feature_detected',
- 'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches',
- 'module_path', 'option_env', 'panic', 'print', 'println', 'stringify',
- 'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable',
- 'vec', 'write', 'writeln',
- ), suffix=r'!'), Name.Function.Magic)
-
+ mimetypes = ['text/rust', 'text/x-rust']
+
+ keyword_types = (words((
+ 'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
+ 'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
+ ), suffix=r'\b'), Keyword.Type)
+
+ builtin_funcs_types = (words((
+ 'Copy', 'Send', 'Sized', 'Sync', 'Unpin',
+ 'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop',
+ 'Box', 'ToOwned', 'Clone',
+ 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
+ 'AsRef', 'AsMut', 'Into', 'From', 'Default',
+ 'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator',
+ 'ExactSizeIterator',
+ 'Option', 'Some', 'None',
+ 'Result', 'Ok', 'Err',
+ 'String', 'ToString', 'Vec',
+ ), suffix=r'\b'), Name.Builtin)
+
+ builtin_macros = (words((
+ 'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column',
+ 'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert',
+ 'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln',
+ 'file', 'format', 'format_args', 'format_args_nl', 'global_asm',
+ 'include', 'include_bytes', 'include_str',
+ 'is_aarch64_feature_detected',
+ 'is_arm_feature_detected',
+ 'is_mips64_feature_detected',
+ 'is_mips_feature_detected',
+ 'is_powerpc64_feature_detected',
+ 'is_powerpc_feature_detected',
+ 'is_x86_feature_detected',
+ 'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches',
+ 'module_path', 'option_env', 'panic', 'print', 'println', 'stringify',
+ 'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable',
+ 'vec', 'write', 'writeln',
+ ), suffix=r'!'), Name.Function.Magic)
+
tokens = {
'root': [
# rust allows a file to start with a shebang, but if the first line
- # starts with #![ then it's not a shebang but a crate attribute.
+ # starts with #![ then it's not a shebang but a crate attribute.
(r'#![^[\r\n].*$', Comment.Preproc),
default('base'),
],
@@ -84,79 +84,79 @@ class RustLexer(RegexLexer):
# Macro parameters
(r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
# Keywords
- (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
- 'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
- 'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
- 'super', 'trait', 'unsafe', 'use', 'where', 'while'),
- suffix=r'\b'), Keyword),
- (words(('abstract', 'become', 'do', 'final', 'macro', 'override',
- 'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
- suffix=r'\b'), Keyword.Reserved),
+ (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
+ 'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
+ 'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
+ 'super', 'trait', 'unsafe', 'use', 'where', 'while'),
+ suffix=r'\b'), Keyword),
+ (words(('abstract', 'become', 'do', 'final', 'macro', 'override',
+ 'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
+ suffix=r'\b'), Keyword.Reserved),
(r'(true|false)\b', Keyword.Constant),
- (r'self\b', Name.Builtin.Pseudo),
+ (r'self\b', Name.Builtin.Pseudo),
(r'mod\b', Keyword, 'modname'),
(r'let\b', Keyword.Declaration),
(r'fn\b', Keyword, 'funcname'),
(r'(struct|enum|type|union)\b', Keyword, 'typename'),
(r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
keyword_types,
- (r'[sS]elf\b', Name.Builtin.Pseudo),
- # Prelude (taken from Rust's src/libstd/prelude.rs)
- builtin_funcs_types,
- builtin_macros,
+ (r'[sS]elf\b', Name.Builtin.Pseudo),
+ # Prelude (taken from Rust's src/libstd/prelude.rs)
+ builtin_funcs_types,
+ builtin_macros,
# Path seperators, so types don't catch them.
(r'::\b', Text),
# Types in positions.
(r'(?::|->)', Text, 'typename'),
# Labels
- (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
+ (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
-
- # Character literals
+
+ # Character literals
(r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
String.Char),
(r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
String.Char),
-
- # Binary literals
+
+ # Binary literals
(r'0b[01_]+', Number.Bin, 'number_lit'),
- # Octal literals
+ # Octal literals
(r'0o[0-7_]+', Number.Oct, 'number_lit'),
- # Hexadecimal literals
+ # Hexadecimal literals
(r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
- # Decimal literals
+ # Decimal literals
(r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
'number_lit'),
(r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
-
- # String literals
+
+ # String literals
(r'b"', String, 'bytestring'),
(r'"', String, 'string'),
- (r'(?s)b?r(#*)".*?"\1', String),
+ (r'(?s)b?r(#*)".*?"\1', String),
- # Lifetime names
- (r"'", Operator, 'lifetime'),
+ # Lifetime names
+ (r"'", Operator, 'lifetime'),
# Operators and Punctuation
- (r'\.\.=?', Operator),
+ (r'\.\.=?', Operator),
(r'[{}()\[\],.;]', Punctuation),
(r'[+\-*/%&|<>^!~@=:?]', Operator),
- # Identifiers
+ # Identifiers
(r'[a-zA-Z_]\w*', Name),
- # Raw identifiers
- (r'r#[a-zA-Z_]\w*', Name),
+ # Raw identifiers
+ (r'r#[a-zA-Z_]\w*', Name),
# Attributes
(r'#!?\[', Comment.Preproc, 'attribute['),
-
- # Misc
- # Lone hashes: not used in Rust syntax, but allowed in macro
- # arguments, most famously for quote::quote!()
- (r'#', Text),
+
+ # Misc
+ # Lone hashes: not used in Rust syntax, but allowed in macro
+ # arguments, most famously for quote::quote!()
+ (r'#', Text),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
@@ -183,17 +183,17 @@ class RustLexer(RegexLexer):
'typename': [
(r'\s+', Text),
(r'&', Keyword.Pseudo),
- (r"'", Operator, 'lifetime'),
- builtin_funcs_types,
+ (r"'", Operator, 'lifetime'),
+ builtin_funcs_types,
keyword_types,
(r'[a-zA-Z_]\w*', Name.Class, '#pop'),
default('#pop'),
],
- 'lifetime': [
- (r"(static|_)", Name.Builtin),
- (r"[a-zA-Z_]+\w*", Name.Attribute),
- default('#pop'),
- ],
+ 'lifetime': [
+ (r"(static|_)", Name.Builtin),
+ (r"[a-zA-Z_]+\w*", Name.Attribute),
+ default('#pop'),
+ ],
'number_lit': [
(r'[ui](8|16|32|64|size)', Keyword, '#pop'),
(r'f(32|64)', Keyword, '#pop'),
@@ -216,7 +216,7 @@ class RustLexer(RegexLexer):
],
'attribute[': [
include('attribute_common'),
- (r'\]', Comment.Preproc, '#pop'),
- (r'[^"\]\[]+', Comment.Preproc),
+ (r'\]', Comment.Preproc, '#pop'),
+ (r'[^"\]\[]+', Comment.Preproc),
],
}
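Editorial note: the Rust hunks shuffle large words(...) tables without altering them. For reference, the same pattern in miniature, using an invented toy lexer that is not part of Pygments:

from pygments.lexer import RegexLexer, words
from pygments.token import Keyword, Name, Text

class ToyKeywordLexer(RegexLexer):
    # Invented lexer: words() compiles the tuple into one optimised
    # alternation, and suffix=r'\b' keeps 'let' from matching 'letter'.
    name = 'ToyKeyword'
    tokens = {
        'root': [
            (words(('let', 'fn', 'return'), suffix=r'\b'), Keyword),
            (r'[A-Za-z_]\w*', Name),
            (r'\s+', Text),
        ],
    }

print(list(ToyKeywordLexer().get_tokens('let letter fn')))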
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sas.py b/contrib/python/Pygments/py3/pygments/lexers/sas.py
index 7d7f9d3689..116f86891d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sas.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sas.py
@@ -4,7 +4,7 @@
Lexer for SAS.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/scdoc.py b/contrib/python/Pygments/py3/pygments/lexers/scdoc.py
index 48b0682ae3..eb798607fd 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/scdoc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/scdoc.py
@@ -1,82 +1,82 @@
-"""
- pygments.lexers.scdoc
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for scdoc, a simple man page generator.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, \
- using, this
-from pygments.token import Text, Comment, Keyword, String, \
- Generic
-
-
-__all__ = ['ScdocLexer']
-
-
-class ScdocLexer(RegexLexer):
- """
- `scdoc` is a simple man page generator for POSIX systems written in C99.
- https://git.sr.ht/~sircmpwn/scdoc
-
- .. versionadded:: 2.5
- """
- name = 'scdoc'
- aliases = ['scdoc', 'scd']
- filenames = ['*.scd', '*.scdoc']
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- # comment
- (r'^(;.+\n)', bygroups(Comment)),
-
- # heading with pound prefix
- (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
- (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
- # bulleted lists
- (r'^(\s*)([*-])(\s)(.+\n)',
- bygroups(Text, Keyword, Text, using(this, state='inline'))),
- # numbered lists
- (r'^(\s*)(\.+\.)( .+\n)',
- bygroups(Text, Keyword, using(this, state='inline'))),
- # quote
- (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
- # text block
- (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
-
- include('inline'),
- ],
- 'inline': [
- # escape
- (r'\\.', Text),
- # underlines
- (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
- # bold
- (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
- # inline code
- (r'`[^`]+`', String.Backtick),
-
- # general text, must come last!
- (r'[^\\\s]+', Text),
- (r'.', Text),
- ],
- }
-
- def analyse_text(text):
- """This is very similar to markdown, save for the escape characters
- needed for * and _."""
- result = 0
-
- if '\\*' in text:
- result += 0.01
-
- if '\\_' in text:
- result += 0.01
-
- return result
+"""
+ pygments.lexers.scdoc
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scdoc, a simple man page generator.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, \
+ using, this
+from pygments.token import Text, Comment, Keyword, String, \
+ Generic
+
+
+__all__ = ['ScdocLexer']
+
+
+class ScdocLexer(RegexLexer):
+ """
+ `scdoc` is a simple man page generator for POSIX systems written in C99.
+ https://git.sr.ht/~sircmpwn/scdoc
+
+ .. versionadded:: 2.5
+ """
+ name = 'scdoc'
+ aliases = ['scdoc', 'scd']
+ filenames = ['*.scd', '*.scdoc']
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ # comment
+ (r'^(;.+\n)', bygroups(Comment)),
+
+ # heading with pound prefix
+ (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
+ (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
+ # bulleted lists
+ (r'^(\s*)([*-])(\s)(.+\n)',
+ bygroups(Text, Keyword, Text, using(this, state='inline'))),
+ # numbered lists
+ (r'^(\s*)(\.+\.)( .+\n)',
+ bygroups(Text, Keyword, using(this, state='inline'))),
+ # quote
+ (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
+ # text block
+ (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
+
+ include('inline'),
+ ],
+ 'inline': [
+ # escape
+ (r'\\.', Text),
+ # underlines
+ (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
+ # bold
+ (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
+ # inline code
+ (r'`[^`]+`', String.Backtick),
+
+ # general text, must come last!
+ (r'[^\\\s]+', Text),
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """This is very similar to markdown, save for the escape characters
+ needed for * and _."""
+ result = 0
+
+ if '\\*' in text:
+ result += 0.01
+
+ if '\\_' in text:
+ result += 0.01
+
+ return result
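Editorial note: the analyse_text hook restored above is the scoring function that pygments.lexers.guess_lexer consults. A small sketch of both sides of that contract; the sample scdoc text is invented, and which lexer guess_lexer ultimately picks depends on every registered scorer, not just this one:

from pygments.lexers import guess_lexer
from pygments.lexers.scdoc import ScdocLexer

text = 'example(1)\n\n# NAME\n\nexample - shows \\* and \\_ escapes\n'

# Calling the hook directly shows the 0.01 + 0.01 contributed by the two
# escaped characters the method looks for.
print(ScdocLexer.analyse_text(text))

# guess_lexer() runs analyse_text() across all registered lexers and
# returns an instance of the best-scoring one.
print(guess_lexer(text))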
diff --git a/contrib/python/Pygments/py3/pygments/lexers/scripting.py b/contrib/python/Pygments/py3/pygments/lexers/scripting.py
index 9a1e63d66a..985a4ee726 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/scripting.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/scripting.py
@@ -4,7 +4,7 @@
Lexer for scripting and embedded languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,11 +14,11 @@ from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Whitespace, Other
-from pygments.util import get_bool_opt, get_list_opt
+from pygments.util import get_bool_opt, get_list_opt
__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
- 'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
+ 'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
class LuaLexer(RegexLexer):
@@ -141,7 +141,7 @@ class LuaLexer(RegexLexer):
self._functions = set()
if self.func_name_highlighting:
from pygments.lexers._lua_builtins import MODULES
- for mod, func in MODULES.items():
+ for mod, func in MODULES.items():
if mod not in self.disabled_modules:
self._functions.update(func)
RegexLexer.__init__(self, **options)
@@ -156,7 +156,7 @@ class LuaLexer(RegexLexer):
elif '.' in value:
a, b = value.split('.')
yield index, Name, a
- yield index + len(a), Punctuation, '.'
+ yield index + len(a), Punctuation, '.'
yield index + len(a) + 1, Name, b
continue
yield index, token, value
@@ -168,9 +168,9 @@ class MoonScriptLexer(LuaLexer):
.. versionadded:: 1.5
"""
- name = 'MoonScript'
- aliases = ['moonscript', 'moon']
- filenames = ['*.moon']
+ name = 'MoonScript'
+ aliases = ['moonscript', 'moon']
+ filenames = ['*.moon']
mimetypes = ['text/x-moonscript', 'application/x-moonscript']
tokens = {
@@ -237,7 +237,7 @@ class ChaiscriptLexer(RegexLexer):
"""
name = 'ChaiScript'
- aliases = ['chaiscript', 'chai']
+ aliases = ['chaiscript', 'chai']
filenames = ['*.chai']
mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
@@ -282,7 +282,7 @@ class ChaiscriptLexer(RegexLexer):
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"', String.Double, 'dqstring'),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
],
'dqstring': [
(r'\$\{[^"}]+?\}', String.Interpol),
@@ -658,18 +658,18 @@ class AppleScriptLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'¬\n', String.Escape),
+ (r'¬\n', String.Escape),
(r"'s\s+", Text), # This is a possessive, consider moving
(r'(--|#).*?$', Comment),
(r'\(\*', Comment.Multiline, 'comment'),
(r'[(){}!,.:]', Punctuation),
- (r'(«)([^»]+)(»)',
+ (r'(«)([^»]+)(»)',
bygroups(Text, Name.Builtin, Text)),
(r'\b((?:considering|ignoring)\s*)'
r'(application responses|case|diacriticals|hyphens|'
r'numeric strings|punctuation|white space)',
bygroups(Keyword, Name.Builtin)),
- (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
+ (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
(r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
(r'^(\s*(?:on|end)\s+)'
r'(%s)' % '|'.join(StudioEvents[::-1]),
@@ -688,7 +688,7 @@ class AppleScriptLexer(RegexLexer):
(r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
(r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
(r'\b(%s)\b' % '|'.join(References), Name.Builtin),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r'\b(%s)\b' % Identifiers, Name.Variable),
(r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
(r'[-+]?\d+', Number.Integer),
@@ -832,7 +832,7 @@ class MOOCodeLexer(RegexLexer):
# Numbers
(r'(0|[1-9][0-9_]*)', Number.Integer),
# Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# exceptions
(r'(E_PERM|E_DIV)', Name.Exception),
# db-refs
@@ -923,7 +923,7 @@ class HybrisLexer(RegexLexer):
'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
Keyword.Type),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
(r'(\.)([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
@@ -943,16 +943,16 @@ class HybrisLexer(RegexLexer):
],
}
- def analyse_text(text):
- """public method and private method don't seem to be quite common
- elsewhere."""
- result = 0
- if re.search(r'\b(?:public|private)\s+method\b', text):
- result += 0.01
- return result
-
-
+ def analyse_text(text):
+ """public method and private method don't seem to be quite common
+ elsewhere."""
+ result = 0
+ if re.search(r'\b(?:public|private)\s+method\b', text):
+ result += 0.01
+ return result
+
+
class EasytrieveLexer(RegexLexer):
"""
Easytrieve Plus is a programming language for extracting, filtering and
@@ -984,7 +984,7 @@ class EasytrieveLexer(RegexLexer):
_DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
_DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
_NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
- _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
+ _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
_KEYWORDS = [
'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
@@ -1227,57 +1227,57 @@ class JclLexer(RegexLexer):
result = 1.0
assert 0.0 <= result <= 1.0
return result
-
-
-class MiniScriptLexer(RegexLexer):
- """
- For `MiniScript <https://miniscript.org>`_ source code.
-
- .. versionadded:: 2.6
- """
-
- name = 'MiniScript'
- aliases = ['miniscript', 'ms']
- filenames = ['*.ms']
- mimetypes = ['text/x-minicript', 'application/x-miniscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- ('//.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number),
- (r'(?i)\d+e[+-]?\d+', Number),
- (r'\d+', Number),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'"', String, 'string_double'),
- (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator),
- (r'[;,\[\]{}()]', Punctuation),
- (words((
- 'break', 'continue', 'else', 'end', 'for', 'function', 'if',
- 'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
- Keyword),
- (words((
- 'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
- 'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
- 'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
- 'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
- 'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
- 'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
- 'yield'), suffix=r'\b'),
- Name.Builtin),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(and|or|not|new)\b', Operator.Word),
- (r'(self|super|__isa)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name.Variable)
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ]
- }
+
+
+class MiniScriptLexer(RegexLexer):
+ """
+ For `MiniScript <https://miniscript.org>`_ source code.
+
+ .. versionadded:: 2.6
+ """
+
+ name = 'MiniScript'
+ aliases = ['miniscript', 'ms']
+ filenames = ['*.ms']
+ mimetypes = ['text/x-minicript', 'application/x-miniscript']
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ ('//.*$', Comment.Single),
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number),
+ (r'(?i)\d+e[+-]?\d+', Number),
+ (r'\d+', Number),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'"', String, 'string_double'),
+ (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator),
+ (r'[;,\[\]{}()]', Punctuation),
+ (words((
+ 'break', 'continue', 'else', 'end', 'for', 'function', 'if',
+ 'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
+ 'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
+ 'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
+ 'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
+ 'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
+ 'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
+ 'yield'), suffix=r'\b'),
+ Name.Builtin),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(and|or|not|new)\b', Operator.Word),
+ (r'(self|super|__isa)\b', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name.Variable)
+ ],
+ 'string_double': [
+ (r'[^"\n]+', String),
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ]
+ }
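Editorial note: the restored MiniScriptLexer block declares the aliases 'miniscript' and 'ms'; those strings are what get_lexer_by_name resolves. A minimal sketch, with an invented one-line MiniScript program:

from pygments import highlight
from pygments.formatters import NullFormatter
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('ms')   # alias declared in the class above

# NullFormatter echoes the text back unstyled, so this only demonstrates
# that the alias resolves and the lexer runs without errors.
print(highlight('print "six times seven is " + str(6*7)', lexer, NullFormatter()))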
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sgf.py b/contrib/python/Pygments/py3/pygments/lexers/sgf.py
index 35c90ff54f..4f09936046 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sgf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sgf.py
@@ -4,7 +4,7 @@
Lexer for Smart Game Format (sgf) file format.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/shell.py b/contrib/python/Pygments/py3/pygments/lexers/shell.py
index fd26a4b3ea..30c87935e1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/shell.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/shell.py
@@ -4,7 +4,7 @@
Lexers for various shells.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,8 +19,8 @@ from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
- 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
- 'ExeclineLexer']
+ 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
+ 'ExeclineLexer']
line_re = re.compile('.*?\n')
@@ -57,7 +57,7 @@ class BashLexer(RegexLexer):
(r'\$', Text),
],
'basic': [
- (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
+ (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
r'select|continue|until|esac|elif)(\s*)\b',
bygroups(Keyword, Text)),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
@@ -149,16 +149,16 @@ class SlurmBashLexer(BashLexer):
else:
yield index, token, value
-
+
class ShellSessionBaseLexer(Lexer):
"""
- Base lexer for shell sessions.
+ Base lexer for shell sessions.
.. versionadded:: 2.1
"""
-
- _venv = re.compile(r'^(\([^)]*\))(\s*)')
-
+
+ _venv = re.compile(r'^(\([^)]*\))(\s*)')
+
def get_tokens_unprocessed(self, text):
innerlexer = self._innerLexerCls(**self.options)
@@ -169,20 +169,20 @@ class ShellSessionBaseLexer(Lexer):
for match in line_re.finditer(text):
line = match.group()
-
- venv_match = self._venv.match(line)
- if venv_match:
- venv = venv_match.group(1)
- venv_whitespace = venv_match.group(2)
- insertions.append((len(curcode),
- [(0, Generic.Prompt.VirtualEnv, venv)]))
- if venv_whitespace:
- insertions.append((len(curcode),
- [(0, Text, venv_whitespace)]))
- line = line[venv_match.end():]
-
- m = self._ps1rgx.match(line)
- if m:
+
+ venv_match = self._venv.match(line)
+ if venv_match:
+ venv = venv_match.group(1)
+ venv_whitespace = venv_match.group(2)
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt.VirtualEnv, venv)]))
+ if venv_whitespace:
+ insertions.append((len(curcode),
+ [(0, Text, venv_whitespace)]))
+ line = line[venv_match.end():]
+
+ m = self._ps1rgx.match(line)
+ if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
@@ -193,13 +193,13 @@ class ShellSessionBaseLexer(Lexer):
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
backslash_continuation = curcode.endswith('\\\n')
- elif backslash_continuation:
- if line.startswith(self._ps2):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:len(self._ps2)])]))
- curcode += line[len(self._ps2):]
- else:
- curcode += line
+ elif backslash_continuation:
+ if line.startswith(self._ps2):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:len(self._ps2)])]))
+ curcode += line[len(self._ps2):]
+ else:
+ curcode += line
backslash_continuation = curcode.endswith('\\\n')
else:
if insertions:
@@ -217,8 +217,8 @@ class ShellSessionBaseLexer(Lexer):
class BashSessionLexer(ShellSessionBaseLexer):
"""
- Lexer for Bash shell sessions, i.e. command lines, including a
- prompt, interspersed with output.
+ Lexer for Bash shell sessions, i.e. command lines, including a
+ prompt, interspersed with output.
.. versionadded:: 1.1
"""
@@ -229,10 +229,10 @@ class BashSessionLexer(ShellSessionBaseLexer):
mimetypes = ['application/x-shell-session', 'application/x-sh-session']
_innerLexerCls = BashLexer
- _ps1rgx = re.compile(
+ _ps1rgx = re.compile(
r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
- r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
- _ps2 = '> '
+ r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
+ _ps2 = '> '
class BatchLexer(RegexLexer):
@@ -242,7 +242,7 @@ class BatchLexer(RegexLexer):
.. versionadded:: 0.7
"""
name = 'Batchfile'
- aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
+ aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
@@ -251,14 +251,14 @@ class BatchLexer(RegexLexer):
_nl = r'\n\x1a'
_punct = r'&<>|'
_ws = r'\t\v\f\r ,;=\xa0'
- _nlws = r'\s\x1a\xa0,;='
+ _nlws = r'\s\x1a\xa0,;='
_space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
_keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
(_nl, _ws, _nl, _punct))
_token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
_start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
- _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
- _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
+ _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
+ _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
_number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
_opword = r'(?:equ|geq|gtr|leq|lss|neq)'
_string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
@@ -268,8 +268,8 @@ class BatchLexer(RegexLexer):
r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
(_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
- _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
- _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
+ _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
+ _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
_token = r'(?:[%s]+|%s)' % (_punct, _core_token)
_token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
_stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
@@ -380,8 +380,8 @@ class BatchLexer(RegexLexer):
return state
def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
- _string=_string, _variable=_variable,
- _ws=_ws, _nlws=_nlws):
+ _string=_string, _variable=_variable,
+ _ws=_ws, _nlws=_nlws):
op = r'=+\-*/!~'
state = []
if compound:
@@ -392,8 +392,8 @@ class BatchLexer(RegexLexer):
(r'\d+', Number.Integer),
(r'[(),]+', Punctuation),
(r'([%s]|%%|\^\^)+' % op, Operator),
- (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
- (_string, _variable, _nl, op, _nlws, _punct, _nlws,
+ (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
+ (_string, _variable, _nl, op, _nlws, _punct, _nlws,
r'[^)]' if compound else r'[\w\W]'),
using(this, state='variable')),
(r'(?=[\x00|&])', Text, '#pop'),
@@ -427,15 +427,15 @@ class BatchLexer(RegexLexer):
_core_token_compound=_core_token_compound,
_nl=_nl, _punct=_punct, _stoken=_stoken,
_string=_string, _space=_space,
- _variable=_variable, _nlws=_nlws):
+ _variable=_variable, _nlws=_nlws):
stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
(_punct, _string, _variable, _core_token_compound))
return [
- (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
- (_nlws, _nlws),
+ (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
+ (_nlws, _nlws),
bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
- (r'((?:(?<=[%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
- (_nlws, _nl, _space, stoken_compound if compound else _stoken),
+ (r'((?:(?<=[%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
+ (_nlws, _nl, _space, stoken_compound if compound else _stoken),
bygroups(Number.Integer, Punctuation, using(this, state='text')))
]
@@ -474,7 +474,7 @@ class BatchLexer(RegexLexer):
'text': [
(r'"', String.Double, 'string'),
include('variable-or-escape'),
- (r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
+ (r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
],
'variable': [
(r'"', String.Double, 'string'),
@@ -495,13 +495,13 @@ class BatchLexer(RegexLexer):
include('follow')
],
'for/f': [
- (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
+ (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
bygroups(String.Double, using(this, state='string'), Text,
Punctuation)),
(r'"', String.Double, ('#pop', 'for2', 'string')),
- (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
+ (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
bygroups(using(this, state='sqstring'), Text, Punctuation)),
- (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
+ (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
bygroups(using(this, state='bqstring'), Text, Punctuation)),
include('for2')
],
@@ -547,8 +547,8 @@ class BatchLexer(RegexLexer):
class MSDOSSessionLexer(ShellSessionBaseLexer):
"""
- Lexer for MS DOS shell sessions, i.e. command lines, including a
- prompt, interspersed with output.
+ Lexer for MS DOS shell sessions, i.e. command lines, including a
+ prompt, interspersed with output.
.. versionadded:: 2.1
"""
@@ -559,7 +559,7 @@ class MSDOSSessionLexer(ShellSessionBaseLexer):
mimetypes = []
_innerLexerCls = BatchLexer
- _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
+ _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
_ps2 = 'More? '
@@ -633,8 +633,8 @@ class TcshLexer(RegexLexer):
class TcshSessionLexer(ShellSessionBaseLexer):
"""
- Lexer for Tcsh sessions, i.e. command lines, including a
- prompt, interspersed with output.
+ Lexer for Tcsh sessions, i.e. command lines, including a
+ prompt, interspersed with output.
.. versionadded:: 2.1
"""
@@ -645,7 +645,7 @@ class TcshSessionLexer(ShellSessionBaseLexer):
mimetypes = []
_innerLexerCls = TcshLexer
- _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
+ _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
_ps2 = '? '
@@ -656,7 +656,7 @@ class PowerShellLexer(RegexLexer):
.. versionadded:: 1.5
"""
name = 'PowerShell'
- aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
+ aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1', '*.psm1']
mimetypes = ['text/x-powershell']
@@ -734,7 +734,7 @@ class PowerShellLexer(RegexLexer):
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
- (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
+ (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
@@ -765,19 +765,19 @@ class PowerShellLexer(RegexLexer):
class PowerShellSessionLexer(ShellSessionBaseLexer):
"""
- Lexer for PowerShell sessions, i.e. command lines, including a
- prompt, interspersed with output.
+ Lexer for PowerShell sessions, i.e. command lines, including a
+ prompt, interspersed with output.
.. versionadded:: 2.1
"""
name = 'PowerShell Session'
- aliases = ['pwsh-session', 'ps1con']
+ aliases = ['pwsh-session', 'ps1con']
filenames = []
mimetypes = []
_innerLexerCls = PowerShellLexer
- _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
+ _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
_ps2 = '>> '
@@ -852,62 +852,62 @@ class FishShellLexer(RegexLexer):
include('root'),
],
}
-
-class ExeclineLexer(RegexLexer):
- """
- Lexer for Laurent Bercot's execline language
- (https://skarnet.org/software/execline).
-
- .. versionadded:: 2.7
- """
-
- name = 'execline'
- aliases = ['execline']
- filenames = ['*.exec']
-
- tokens = {
- 'root': [
- include('basic'),
- include('data'),
- include('interp')
- ],
- 'interp': [
- (r'\$\{', String.Interpol, 'curly'),
- (r'\$[\w@#]+', Name.Variable), # user variable
- (r'\$', Text),
- ],
- 'basic': [
- (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
- r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
- r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
- r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
- r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
- r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
- r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
- r'withstdinas)\b', Name.Builtin),
- (r'\A#!.+\n', Comment.Hashbang),
- (r'#.*\n', Comment.Single),
- (r'[{}]', Operator)
- ],
- 'data': [
- (r'(?s)"(\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r'\s+', Text),
- (r'[^\s{}$"\\]+', Text)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r'[\w#@]+', Name.Variable),
- include('root')
- ]
-
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'execlineb'):
- return 1
+
+class ExeclineLexer(RegexLexer):
+ """
+ Lexer for Laurent Bercot's execline language
+ (https://skarnet.org/software/execline).
+
+ .. versionadded:: 2.7
+ """
+
+ name = 'execline'
+ aliases = ['execline']
+ filenames = ['*.exec']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ include('data'),
+ include('interp')
+ ],
+ 'interp': [
+ (r'\$\{', String.Interpol, 'curly'),
+ (r'\$[\w@#]+', Name.Variable), # user variable
+ (r'\$', Text),
+ ],
+ 'basic': [
+ (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
+ r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
+ r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
+ r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
+ r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
+ r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
+ r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
+ r'withstdinas)\b', Name.Builtin),
+ (r'\A#!.+\n', Comment.Hashbang),
+ (r'#.*\n', Comment.Single),
+ (r'[{}]', Operator)
+ ],
+ 'data': [
+ (r'(?s)"(\\.|[^"\\$])*"', String.Double),
+ (r'"', String.Double, 'string'),
+ (r'\s+', Text),
+ (r'[^\s{}$"\\]+', Text)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
+ 'curly': [
+ (r'\}', String.Interpol, '#pop'),
+ (r'[\w#@]+', Name.Variable),
+ include('root')
+ ]
+
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'execlineb'):
+ return 1
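Editorial note: ExeclineLexer.analyse_text above defers entirely to pygments.util.shebang_matches. A short sketch of that helper on its own; the two sample scripts are invented:

from pygments.util import shebang_matches

execline_script = '#!/usr/bin/execlineb -P\nforeground { echo hello }\n'
bash_script = '#!/bin/bash\necho hello\n'

# shebang_matches() inspects only the '#!' line and tests the interpreter
# name against the given regex, which is how analyse_text() above returns 1
# for execlineb scripts.
print(shebang_matches(execline_script, r'execlineb'))   # True
print(shebang_matches(bash_script, r'execlineb'))       # False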
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sieve.py b/contrib/python/Pygments/py3/pygments/lexers/sieve.py
index 6fa33d3181..2dcc4cf76e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sieve.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sieve.py
@@ -1,68 +1,68 @@
-"""
- pygments.lexers.sieve
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Sieve file format.
-
- https://tools.ietf.org/html/rfc5228
- https://tools.ietf.org/html/rfc5173
- https://tools.ietf.org/html/rfc5229
- https://tools.ietf.org/html/rfc5230
- https://tools.ietf.org/html/rfc5232
- https://tools.ietf.org/html/rfc5235
- https://tools.ietf.org/html/rfc5429
- https://tools.ietf.org/html/rfc8580
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Name, Literal, String, Text, Punctuation, Keyword
-
-__all__ = ["SieveLexer"]
-
-
-class SieveLexer(RegexLexer):
- """
- Lexer for sieve format.
- """
- name = 'Sieve'
- filenames = ['*.siv', '*.sieve']
- aliases = ['sieve']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'[();,{}\[\]]', Punctuation),
- # import:
- (r'(?i)require',
- Keyword.Namespace),
- # tags:
- (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|count|days|detail|domain|fcc|flags|from|handle|importance|is|localpart|length|lowerfirst|lower|matches|message|mime|options|over|percent|quotewildcard|raw|regex|specialuse|subject|text|under|upperfirst|upper|value)',
- bygroups(Name.Tag, Name.Tag)),
- # tokens:
- (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|ereject|exists|false|fileinto|if|hasflag|header|keep|notify_method_capability|notify|not|redirect|reject|removeflag|setflag|size|spamtest|stop|string|true|vacation|virustest)',
- Name.Builtin),
- (r'(?i)set',
- Keyword.Declaration),
- # number:
- (r'([0-9.]+)([kmgKMG])?',
- bygroups(Literal.Number, Literal.Number)),
- # comment:
- (r'#.*$',
- Comment.Single),
- (r'/\*.*\*/',
- Comment.Multiline),
- # string:
- (r'"[^"]*?"',
- String),
- # text block:
- (r'text:',
- Name.Tag, 'text'),
- ],
- 'text': [
- (r'[^.].*?\n', String),
- (r'^\.', Punctuation, "#pop"),
- ]
- }
+"""
+ pygments.lexers.sieve
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Sieve file format.
+
+ https://tools.ietf.org/html/rfc5228
+ https://tools.ietf.org/html/rfc5173
+ https://tools.ietf.org/html/rfc5229
+ https://tools.ietf.org/html/rfc5230
+ https://tools.ietf.org/html/rfc5232
+ https://tools.ietf.org/html/rfc5235
+ https://tools.ietf.org/html/rfc5429
+ https://tools.ietf.org/html/rfc8580
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Name, Literal, String, Text, Punctuation, Keyword
+
+__all__ = ["SieveLexer"]
+
+
+class SieveLexer(RegexLexer):
+ """
+ Lexer for sieve format.
+ """
+ name = 'Sieve'
+ filenames = ['*.siv', '*.sieve']
+ aliases = ['sieve']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'[();,{}\[\]]', Punctuation),
+ # import:
+ (r'(?i)require',
+ Keyword.Namespace),
+ # tags:
+ (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|count|days|detail|domain|fcc|flags|from|handle|importance|is|localpart|length|lowerfirst|lower|matches|message|mime|options|over|percent|quotewildcard|raw|regex|specialuse|subject|text|under|upperfirst|upper|value)',
+ bygroups(Name.Tag, Name.Tag)),
+ # tokens:
+ (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|ereject|exists|false|fileinto|if|hasflag|header|keep|notify_method_capability|notify|not|redirect|reject|removeflag|setflag|size|spamtest|stop|string|true|vacation|virustest)',
+ Name.Builtin),
+ (r'(?i)set',
+ Keyword.Declaration),
+ # number:
+ (r'([0-9.]+)([kmgKMG])?',
+ bygroups(Literal.Number, Literal.Number)),
+ # comment:
+ (r'#.*$',
+ Comment.Single),
+ (r'/\*.*\*/',
+ Comment.Multiline),
+ # string:
+ (r'"[^"]*?"',
+ String),
+ # text block:
+ (r'text:',
+ Name.Tag, 'text'),
+ ],
+ 'text': [
+ (r'[^.].*?\n', String),
+ (r'^\.', Punctuation, "#pop"),
+ ]
+ }
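
The SieveLexer above is looked up by the 'sieve' alias or the *.siv/*.sieve filename patterns. A minimal sketch of rendering a small RFC 5228-style script to HTML (the script text is illustrative):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

sieve_src = (
    'require ["fileinto"];\n'
    'if header :contains "subject" "spam" {\n'
    '    fileinto "Junk";\n'
    '}\n'
)

# 'require', the ':contains' tag and 'fileinto' hit the keyword, tag and
# builtin rules defined in the lexer above.
print(highlight(sieve_src, get_lexer_by_name("sieve"), HtmlFormatter()))
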
diff --git a/contrib/python/Pygments/py3/pygments/lexers/slash.py b/contrib/python/Pygments/py3/pygments/lexers/slash.py
index df0e23de5f..cf6ddb8f2d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/slash.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/slash.py
@@ -5,7 +5,7 @@
Lexer for the `Slash <https://github.com/arturadib/Slash-A>`_ programming
language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class SlashLanguageLexer(ExtendedRegexLexer):
def right_angle_bracket(lexer, match, ctx):
if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
ctx.stack.pop()
- yield match.start(), String.Interpol, '}'
+ yield match.start(), String.Interpol, '}'
ctx.pos = match.end()
pass
@@ -177,8 +177,8 @@ class SlashLexer(DelegatingLexer):
name = 'Slash'
aliases = ['slash']
- filenames = ['*.sla']
+ filenames = ['*.sla']
def __init__(self, **options):
from pygments.lexers.web import HtmlLexer
- super().__init__(HtmlLexer, SlashLanguageLexer, **options)
+ super().__init__(HtmlLexer, SlashLanguageLexer, **options)
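
SlashLexer above is a DelegatingLexer: the inner SlashLanguageLexer emits Other tokens for the portions that the outer HtmlLexer should handle. A minimal sketch of the same composition pattern with a hypothetical inner lexer (NoteLexer and HtmlWithNotesLexer are illustrative names, not part of Pygments):

from pygments.lexer import DelegatingLexer, RegexLexer
from pygments.lexers.html import HtmlLexer
from pygments.token import Comment, Other

class NoteLexer(RegexLexer):
    """Hypothetical inner lexer: '#' lines are comments, the rest is HTML."""
    tokens = {
        'root': [
            (r'#[^\n]*\n?', Comment.Single),
            (r'[^#]+', Other),  # Other-marked text is re-lexed by the root lexer
        ],
    }

class HtmlWithNotesLexer(DelegatingLexer):
    def __init__(self, **options):
        # Same shape as SlashLexer.__init__ above: (root lexer, language lexer).
        super().__init__(HtmlLexer, NoteLexer, **options)
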
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py b/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
index ebb3311dd2..bb7122511a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
@@ -4,7 +4,7 @@
Lexers for Smalltalk and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -138,7 +138,7 @@ class SmalltalkLexer(RegexLexer):
class NewspeakLexer(RegexLexer):
"""
- For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
+ For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
.. versionadded:: 1.1
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smithy.py b/contrib/python/Pygments/py3/pygments/lexers/smithy.py
index 0f0a91204c..52cf85cea5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smithy.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smithy.py
@@ -1,79 +1,79 @@
-"""
- pygments.lexers.smithy
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Smithy IDL.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Keyword, Name, String, \
- Number, Whitespace, Punctuation
-
-__all__ = ['SmithyLexer']
-
-
-class SmithyLexer(RegexLexer):
- """
- For Smithy IDL
-
- .. versionadded:: 2.10
- """
- name = 'Smithy'
- filenames = ['*.smithy']
- aliases = ['smithy']
-
- flags = re.MULTILINE | re.UNICODE
- unquoted = r'[A-Za-z0-9_\.#$-]+'
- identifier = r"[A-Za-z0-9_\.#$-]+"
-
- simple_shapes = (
- 'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
- 'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
- 'timestamp',
- )
-
- aggregate_shapes = (
- 'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
- 'operation', 'service', 'trait'
- )
-
- tokens = {
- 'root': [
- (r'///.*$', Comment.Multiline),
- (r'//.*$', Comment),
- (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
- (r'(=)', Name.Decorator),
- (r'^(\$version)(:)(.+)',
- bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
- (r'^(namespace)(\s+' + identifier + r')\b',
- bygroups(Keyword.Declaration, Name.Class)),
- (words(simple_shapes,
- prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
- bygroups(Keyword.Declaration, Name.Class)),
- (words(aggregate_shapes,
- prefix=r'^', suffix=r'(\s+' + identifier + r')'),
- bygroups(Keyword.Declaration, Name.Class)),
- (r'^(metadata)(\s+.+)(\s*)(=)',
- bygroups(Keyword.Declaration, Name.Class, Whitespace, Name.Decorator)),
- (r"(true|false|null)", Keyword.Constant),
- (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
- (identifier + ":", Name.Label),
- (identifier, Name.Variable.Class),
- (r'\[', Text, "#push"),
- (r'\]', Text, "#pop"),
- (r'\(', Text, "#push"),
- (r'\)', Text, "#pop"),
- (r'\{', Text, "#push"),
- (r'\}', Text, "#pop"),
- (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
- (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
- (r'[:,]+', Punctuation),
- (r'\s+', Whitespace),
- ]
- }
+"""
+ pygments.lexers.smithy
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Smithy IDL.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+ Number, Whitespace, Punctuation
+
+__all__ = ['SmithyLexer']
+
+
+class SmithyLexer(RegexLexer):
+ """
+ For Smithy IDL
+
+ .. versionadded:: 2.10
+ """
+ name = 'Smithy'
+ filenames = ['*.smithy']
+ aliases = ['smithy']
+
+ flags = re.MULTILINE | re.UNICODE
+ unquoted = r'[A-Za-z0-9_\.#$-]+'
+ identifier = r"[A-Za-z0-9_\.#$-]+"
+
+ simple_shapes = (
+ 'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
+ 'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
+ 'timestamp',
+ )
+
+ aggregate_shapes = (
+ 'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
+ 'operation', 'service', 'trait'
+ )
+
+ tokens = {
+ 'root': [
+ (r'///.*$', Comment.Multiline),
+ (r'//.*$', Comment),
+ (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
+ (r'(=)', Name.Decorator),
+ (r'^(\$version)(:)(.+)',
+ bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
+ (r'^(namespace)(\s+' + identifier + r')\b',
+ bygroups(Keyword.Declaration, Name.Class)),
+ (words(simple_shapes,
+ prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
+ bygroups(Keyword.Declaration, Name.Class)),
+ (words(aggregate_shapes,
+ prefix=r'^', suffix=r'(\s+' + identifier + r')'),
+ bygroups(Keyword.Declaration, Name.Class)),
+ (r'^(metadata)(\s+.+)(\s*)(=)',
+ bygroups(Keyword.Declaration, Name.Class, Whitespace, Name.Decorator)),
+ (r"(true|false|null)", Keyword.Constant),
+ (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
+ (identifier + ":", Name.Label),
+ (identifier, Name.Variable.Class),
+ (r'\[', Text, "#push"),
+ (r'\]', Text, "#pop"),
+ (r'\(', Text, "#push"),
+ (r'\)', Text, "#pop"),
+ (r'\{', Text, "#push"),
+ (r'\}', Text, "#pop"),
+ (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
+ (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
+ (r'[:,]+', Punctuation),
+ (r'\s+', Whitespace),
+ ]
+ }
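
The SmithyLexer above can be exercised directly through its 'smithy' alias; a minimal sketch that dumps the token stream for a small model (the model text is illustrative):

from pygments.lexers import get_lexer_by_name

smithy_src = (
    '$version: "1.0"\n'
    'namespace example.weather\n'
    '\n'
    'structure Forecast {\n'
    '    chanceOfRain: Float\n'
    '}\n'
)

# '$version', 'namespace' and 'structure' hit the declaration rules above,
# while 'chanceOfRain:' matches the Name.Label rule.
for ttype, value in get_lexer_by_name("smithy").get_tokens(smithy_src):
    print(ttype, repr(value))
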
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smv.py b/contrib/python/Pygments/py3/pygments/lexers/smv.py
index a4cbf9455e..185fb9ca82 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smv.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smv.py
@@ -4,13 +4,13 @@
Lexers for the SMV languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, Text
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, Text
__all__ = ['NuSMVLexer']
@@ -68,7 +68,7 @@ class NuSMVLexer(RegexLexer):
(r'\-?\d+\b', Number.Integer),
(r'0[su][bB]\d*_[01_]+', Number.Bin),
(r'0[su][oO]\d*_[0-7_]+', Number.Oct),
- (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
+ (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
(r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
# Whitespace, punctuation and the rest
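
The NuSMV hunk above only touches the sized decimal literal rule; a minimal sketch of how such literals tokenize (the model snippet is illustrative):

from pygments.lexers import get_lexer_by_name
from pygments.token import Number

smv_src = 'MODULE main\nVAR x : word[3];\nASSIGN init(x) := 0ud3_5;\n'

# '0ud3_5' matches the 0[su][dD]... rule shown in the hunk above.
numbers = [v for t, v in get_lexer_by_name("nusmv").get_tokens(smv_src) if t in Number]
print(numbers)
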
diff --git a/contrib/python/Pygments/py3/pygments/lexers/snobol.py b/contrib/python/Pygments/py3/pygments/lexers/snobol.py
index b5719c3a5a..8a8b480123 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/snobol.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/snobol.py
@@ -4,7 +4,7 @@
Lexers for the SNOBOL language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/solidity.py b/contrib/python/Pygments/py3/pygments/lexers/solidity.py
index 0c42586fae..a3679641be 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/solidity.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/solidity.py
@@ -1,91 +1,91 @@
-"""
- pygments.lexers.solidity
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Solidity.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['SolidityLexer']
-
-
-class SolidityLexer(RegexLexer):
- """
- For Solidity source code.
-
- .. versionadded:: 2.5
- """
-
- name = 'Solidity'
- aliases = ['solidity']
- filenames = ['*.sol']
- mimetypes = []
-
- flags = re.MULTILINE | re.UNICODE
-
- datatype = (
- r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
- r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
- r'|216|224|232|240|248|256)?))\b'
- )
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
- (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword, Whitespace, Name.Entity)),
- (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
- r'([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
- (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, Whitespace, Name.Variable)),
- (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
- (words((
- 'block', 'break', 'constant', 'constructor', 'continue',
- 'contract', 'do', 'else', 'external', 'false', 'for',
- 'function', 'if', 'import', 'inherited', 'internal', 'is',
- 'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
- 'payable', 'private', 'public', 'require', 'return',
- 'returns', 'struct', 'suicide', 'throw', 'this', 'true',
- 'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
- Keyword.Type),
- (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (datatype, Keyword.Type),
- include('constants'),
- (r'[a-zA-Z_]\w*', Text),
- (r'[!<=>+*/-]', Operator),
- (r'[.;:{}(),\[\]]', Punctuation)
- ],
- 'comments': [
- (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
- (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
- ],
- 'constants': [
- (r'("(\\"|.)*?")', String.Double),
- (r"('(\\'|.)*?')", String.Single),
- (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
- (r'\b\d+\b', Number.Decimal),
- ],
- 'pragma': [
- include('whitespace'),
- include('comments'),
- (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
- bygroups(Operator, Whitespace, Keyword)),
- (r';', Punctuation, '#pop')
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- (r'\n', Whitespace)
- ]
- }
+"""
+ pygments.lexers.solidity
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Solidity.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['SolidityLexer']
+
+
+class SolidityLexer(RegexLexer):
+ """
+ For Solidity source code.
+
+ .. versionadded:: 2.5
+ """
+
+ name = 'Solidity'
+ aliases = ['solidity']
+ filenames = ['*.sol']
+ mimetypes = []
+
+ flags = re.MULTILINE | re.UNICODE
+
+ datatype = (
+ r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
+ r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
+ r'|216|224|232|240|248|256)?))\b'
+ )
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
+ (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword, Whitespace, Name.Entity)),
+ (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
+ r'([a-zA-Z_]\w*)',
+ bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
+ (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Type, Whitespace, Name.Variable)),
+ (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
+ (words((
+ 'block', 'break', 'constant', 'constructor', 'continue',
+ 'contract', 'do', 'else', 'external', 'false', 'for',
+ 'function', 'if', 'import', 'inherited', 'internal', 'is',
+ 'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
+ 'payable', 'private', 'public', 'require', 'return',
+ 'returns', 'struct', 'suicide', 'throw', 'this', 'true',
+ 'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Type),
+ (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ (datatype, Keyword.Type),
+ include('constants'),
+ (r'[a-zA-Z_]\w*', Text),
+ (r'[!<=>+*/-]', Operator),
+ (r'[.;:{}(),\[\]]', Punctuation)
+ ],
+ 'comments': [
+ (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+ (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
+ ],
+ 'constants': [
+ (r'("(\\"|.)*?")', String.Double),
+ (r"('(\\'|.)*?')", String.Single),
+ (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
+ (r'\b\d+\b', Number.Decimal),
+ ],
+ 'pragma': [
+ include('whitespace'),
+ include('comments'),
+ (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
+ bygroups(Operator, Whitespace, Keyword)),
+ (r';', Punctuation, '#pop')
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ (r'\n', Whitespace)
+ ]
+ }
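
The SolidityLexer above registers the 'solidity' alias and the *.sol pattern; a minimal highlighting sketch (the contract source is illustrative):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

sol_src = (
    'pragma solidity ^0.8.0;\n'
    'contract Counter {\n'
    '    uint256 count;\n'
    '    function increment() public { count += 1; }\n'
    '}\n'
)

# 'pragma solidity' enters the dedicated 'pragma' state above, and 'uint256'
# matches the sized-type regex in `datatype`.
print(highlight(sol_src, get_lexer_by_name("solidity"), TerminalFormatter()))
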
diff --git a/contrib/python/Pygments/py3/pygments/lexers/special.py b/contrib/python/Pygments/py3/pygments/lexers/special.py
index bff6652c56..7bdfaa81e0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/special.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/special.py
@@ -4,19 +4,19 @@
Special lexers.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import ast
+import ast
import re
from pygments.lexer import Lexer
-from pygments.token import Token, Error, Text, Generic
-from pygments.util import get_choice_opt
+from pygments.token import Token, Error, Text, Generic
+from pygments.util import get_choice_opt
-__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
+__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
class TextLexer(Lexer):
@@ -35,28 +35,28 @@ class TextLexer(Lexer):
def analyse_text(text):
return TextLexer.priority
-
-class OutputLexer(Lexer):
- """
- Simple lexer that highlights everything as ``Token.Generic.Output``.
-
- .. versionadded:: 2.10
- """
- name = 'Text output'
- aliases = ['output']
-
- def get_tokens_unprocessed(self, text):
- yield 0, Generic.Output, text
-
-
+
+class OutputLexer(Lexer):
+ """
+ Simple lexer that highlights everything as ``Token.Generic.Output``.
+
+ .. versionadded:: 2.10
+ """
+ name = 'Text output'
+ aliases = ['output']
+
+ def get_tokens_unprocessed(self, text):
+ yield 0, Generic.Output, text
+
+
_ttype_cache = {}
-line_re = re.compile('.*?\n')
+line_re = re.compile('.*?\n')
class RawTokenLexer(Lexer):
"""
- Recreate a token stream formatted with the `RawTokenFormatter`.
+ Recreate a token stream formatted with the `RawTokenFormatter`.
Additional options accepted:
@@ -65,7 +65,7 @@ class RawTokenLexer(Lexer):
the given compression algorithm before lexing (default: ``""``).
"""
name = 'Raw token data'
- aliases = []
+ aliases = []
filenames = []
mimetypes = ['application/x-pygments-tokens']
@@ -75,23 +75,23 @@ class RawTokenLexer(Lexer):
Lexer.__init__(self, **options)
def get_tokens(self, text):
- if self.compress:
- if isinstance(text, str):
- text = text.encode('latin1')
- try:
- if self.compress == 'gz':
- import gzip
- text = gzip.decompress(text)
- elif self.compress == 'bz2':
- import bz2
- text = bz2.decompress(text)
- except OSError:
- yield Error, text.decode('latin1')
- if isinstance(text, bytes):
- text = text.decode('latin1')
-
- # do not call Lexer.get_tokens() because stripping is not optional.
- text = text.strip('\n') + '\n'
+ if self.compress:
+ if isinstance(text, str):
+ text = text.encode('latin1')
+ try:
+ if self.compress == 'gz':
+ import gzip
+ text = gzip.decompress(text)
+ elif self.compress == 'bz2':
+ import bz2
+ text = bz2.decompress(text)
+ except OSError:
+ yield Error, text.decode('latin1')
+ if isinstance(text, bytes):
+ text = text.decode('latin1')
+
+ # do not call Lexer.get_tokens() because stripping is not optional.
+ text = text.strip('\n') + '\n'
for i, t, v in self.get_tokens_unprocessed(text):
yield t, v
@@ -99,7 +99,7 @@ class RawTokenLexer(Lexer):
length = 0
for match in line_re.finditer(text):
try:
- ttypestr, val = match.group().rstrip().split('\t', 1)
+ ttypestr, val = match.group().rstrip().split('\t', 1)
ttype = _ttype_cache.get(ttypestr)
if not ttype:
ttype = Token
@@ -109,11 +109,11 @@ class RawTokenLexer(Lexer):
raise ValueError('malformed token name')
ttype = getattr(ttype, ttype_)
_ttype_cache[ttypestr] = ttype
- val = ast.literal_eval(val)
- if not isinstance(val, str):
- raise ValueError('expected str')
- except (SyntaxError, ValueError):
- val = match.group()
- ttype = Error
+ val = ast.literal_eval(val)
+ if not isinstance(val, str):
+ raise ValueError('expected str')
+ except (SyntaxError, ValueError):
+ val = match.group()
+ ttype = Error
yield length, ttype, val
length += len(val)
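
RawTokenLexer above consumes the tab-separated "<tokentype>\t<repr(value)>" stream that RawTokenFormatter emits; a minimal round-trip sketch (no compression, so the bytes are decoded as latin1 directly):

from pygments import highlight
from pygments.formatters import RawTokenFormatter
from pygments.lexers import PythonLexer
from pygments.lexers.special import RawTokenLexer

# RawTokenFormatter writes bytes: one "<tokentype>\t<repr(value)>" line per token.
raw = highlight('print("hi")\n', PythonLexer(), RawTokenFormatter())

# Feeding those bytes back through RawTokenLexer reproduces the token stream.
for ttype, value in RawTokenLexer().get_tokens(raw):
    print(ttype, repr(value))
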
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sql.py b/contrib/python/Pygments/py3/pygments/lexers/sql.py
index 752f135005..534a3d2b42 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sql.py
@@ -33,26 +33,26 @@
The ``tests/examplefiles`` contains a few test files with data to be
parsed by these lexers.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
-from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
- Keyword, Name, String, Number, Generic, Literal
+from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
+ Keyword, Name, String, Number, Generic, Literal
from pygments.lexers import get_lexer_by_name, ClassNotFound
from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
PSEUDO_TYPES, PLPGSQL_KEYWORDS
-from pygments.lexers._mysql_builtins import \
- MYSQL_CONSTANTS, \
- MYSQL_DATATYPES, \
- MYSQL_FUNCTIONS, \
- MYSQL_KEYWORDS, \
- MYSQL_OPTIMIZER_HINTS
-
+from pygments.lexers._mysql_builtins import \
+ MYSQL_CONSTANTS, \
+ MYSQL_DATATYPES, \
+ MYSQL_FUNCTIONS, \
+ MYSQL_KEYWORDS, \
+ MYSQL_OPTIMIZER_HINTS
+
from pygments.lexers import _tsql_builtins
@@ -81,28 +81,28 @@ def language_callback(lexer, match):
The lexer is chosen looking for a nearby LANGUAGE or assumed as
plpgsql if inside a DO statement and no LANGUAGE has been found.
"""
- lx = None
+ lx = None
m = language_re.match(lexer.text[match.end():match.end()+100])
if m is not None:
- lx = lexer._get_lexer(m.group(1))
+ lx = lexer._get_lexer(m.group(1))
else:
m = list(language_re.finditer(
lexer.text[max(0, match.start()-100):match.start()]))
if m:
- lx = lexer._get_lexer(m[-1].group(1))
+ lx = lexer._get_lexer(m[-1].group(1))
else:
m = list(do_re.finditer(
lexer.text[max(0, match.start()-25):match.start()]))
if m:
- lx = lexer._get_lexer('plpgsql')
+ lx = lexer._get_lexer('plpgsql')
# 1 = $, 2 = delimiter, 3 = $
yield (match.start(1), String, match.group(1))
yield (match.start(2), String.Delimiter, match.group(2))
yield (match.start(3), String, match.group(3))
# 4 = string contents
- if lx:
- yield from lx.get_tokens_unprocessed(match.group(4))
+ if lx:
+ yield from lx.get_tokens_unprocessed(match.group(4))
else:
yield (match.start(4), String, match.group(4))
# 5 = $, 6 = delimiter, 7 = $
@@ -111,7 +111,7 @@ def language_callback(lexer, match):
yield (match.start(7), String, match.group(7))
-class PostgresBase:
+class PostgresBase:
"""Base class for Postgres-related lexers.
This is implemented as a mixin to avoid the Lexer metaclass kicking in.
@@ -123,7 +123,7 @@ class PostgresBase:
def get_tokens_unprocessed(self, text, *args):
# Have a copy of the entire text to be used by `language_callback`.
self.text = text
- yield from super().get_tokens_unprocessed(text, *args)
+ yield from super().get_tokens_unprocessed(text, *args)
def _get_lexer(self, lang):
if lang.lower() == 'sql':
@@ -137,9 +137,9 @@ class PostgresBase:
if lang.startswith('pl') and lang.endswith('u'):
tries.append(lang[2:-1])
- for lx in tries:
+ for lx in tries:
try:
- return get_lexer_by_name(lx, **self.options)
+ return get_lexer_by_name(lx, **self.options)
except ClassNotFound:
pass
else:
@@ -166,8 +166,8 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(' + '|'.join(s.replace(" ", r"\s+")
- for s in DATATYPES + PSEUDO_TYPES) + r')\b',
- Name.Builtin),
+ for s in DATATYPES + PSEUDO_TYPES) + r')\b',
+ Name.Builtin),
(words(KEYWORDS, suffix=r'\b'), Keyword),
(r'[+*/<>=~!@#%^&|`?-]+', Operator),
(r'::', Operator), # cast
@@ -215,7 +215,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
mimetypes = ['text/x-plpgsql']
flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
+ tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
# extend the keywords list
for i, pattern in enumerate(tokens['root']):
@@ -249,7 +249,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
aliases = [] # not public
flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
+ tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
tokens['root'].append(
(r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
@@ -263,7 +263,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
(r"[^\s]+", String.Symbol),
]
-
+
re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
re_psql_command = re.compile(r'\s*\\')
re_end_command = re.compile(r';\s*(--.*?)?$')
@@ -274,7 +274,7 @@ re_message = re.compile(
r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-class lookahead:
+class lookahead:
"""Wrap an iterator and allow pushing back an item."""
def __init__(self, x):
self.iter = iter(x)
@@ -323,7 +323,7 @@ class PostgresConsoleLexer(Lexer):
# Identify a shell prompt in case of psql commandline example
if line.startswith('$') and not curcode:
lexer = get_lexer_by_name('console', **self.options)
- yield from lexer.get_tokens_unprocessed(line)
+ yield from lexer.get_tokens_unprocessed(line)
break
# Identify a psql prompt
@@ -343,8 +343,8 @@ class PostgresConsoleLexer(Lexer):
break
# Emit the combined stream of command and prompt(s)
- yield from do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode))
# Emit the output lines
out_token = Generic.Output
@@ -386,99 +386,99 @@ class SqlLexer(RegexLexer):
(r'--.*\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words((
- 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
- 'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
- 'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
- 'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD',
- 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
- 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
- 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
+ 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
+ 'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
+ 'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
+ 'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD',
+ 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
+ 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
+ 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
- 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
- 'CLUSTER', 'COALESCE', 'COBOL', 'COLLATE', 'COLLATION',
- 'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
- 'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT',
- 'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT',
- 'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS',
- 'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA',
- 'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT',
- 'COPY', 'CORRESPONDING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER',
- 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
- 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
- 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
+ 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
+ 'CLUSTER', 'COALESCE', 'COBOL', 'COLLATE', 'COLLATION',
+ 'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
+ 'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT',
+ 'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT',
+ 'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS',
+ 'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA',
+ 'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT',
+ 'COPY', 'CORRESPONDING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER',
+ 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
+ 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
+ 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
- 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
- 'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
- 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
- 'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH',
- 'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION',
- 'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING',
- 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION',
- 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
- 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL',
- 'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE',
- 'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET',
- 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING',
- 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
- 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMEDIATELY', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT',
- 'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX',
- 'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT',
- 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO',
- 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY',
- 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
- 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT',
- 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION',
- 'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE',
- 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN',
- 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH',
- 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB',
- 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT',
- 'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT',
- 'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY',
- 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER',
- 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY',
- 'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE',
- 'PARAMETER_NAME', 'PARAMETER_ORDINAL_POSITION',
- 'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME',
- 'PARAMETER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PERIOD', 'PLACING',
- 'PLI', 'POSITION', 'POSTFIX', 'PRECEEDS', 'PRECISION', 'PREFIX', 'PREORDER',
- 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
- 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF',
- 'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME',
- 'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT',
- 'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE',
- 'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE',
- 'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS',
- 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME',
- 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
- 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER',
- 'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE',
- 'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME',
- 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG',
- 'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN',
- 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN',
- 'SUBLIST', 'SUBSTRING', 'SUCCEEDS', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
- 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY',
- 'TERMINATE', 'THAN', 'THEN', 'TIME', 'TIMESTAMP', 'TIMEZONE_HOUR',
- 'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSACTION',
- 'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSACTION_ACTIVE',
- 'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER',
- 'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE',
- 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED',
- 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL',
- 'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG',
- 'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM',
- 'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE',
- 'VERSION', 'VERSIONS', 'VERSIONING', 'VIEW',
- 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
- 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
+ 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
+ 'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
+ 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
+ 'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH',
+ 'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION',
+ 'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING',
+ 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION',
+ 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
+ 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL',
+ 'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE',
+ 'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET',
+ 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING',
+ 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF',
+ 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMEDIATELY', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT',
+ 'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX',
+ 'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT',
+ 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO',
+ 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY',
+ 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
+ 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT',
+ 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION',
+ 'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE',
+ 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN',
+ 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH',
+ 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB',
+ 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT',
+ 'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT',
+ 'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY',
+ 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER',
+ 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY',
+ 'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE',
+ 'PARAMETER_NAME', 'PARAMETER_ORDINAL_POSITION',
+ 'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME',
+ 'PARAMETER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PERIOD', 'PLACING',
+ 'PLI', 'POSITION', 'POSTFIX', 'PRECEEDS', 'PRECISION', 'PREFIX', 'PREORDER',
+ 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
+ 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF',
+ 'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME',
+ 'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT',
+ 'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE',
+ 'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE',
+ 'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS',
+ 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME',
+ 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
+ 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER',
+ 'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE',
+ 'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME',
+ 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG',
+ 'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN',
+ 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN',
+ 'SUBLIST', 'SUBSTRING', 'SUCCEEDS', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
+ 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY',
+ 'TERMINATE', 'THAN', 'THEN', 'TIME', 'TIMESTAMP', 'TIMEZONE_HOUR',
+ 'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSACTION',
+ 'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSACTION_ACTIVE',
+ 'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER',
+ 'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE',
+ 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED',
+ 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL',
+ 'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG',
+ 'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM',
+ 'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE',
+ 'VERSION', 'VERSIONS', 'VERSIONING', 'VIEW',
+ 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
+ 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
Keyword),
(words((
- 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
- 'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
- 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
- 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
+ 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
+ 'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
+ 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
+ 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
Name.Builtin),
(r'[+*/<>=~!@#%^&|`?-]', Operator),
(r'[0-9]+', Number.Integer),
@@ -518,7 +518,7 @@ class TransactSqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Whitespace),
- (r'--.*?$\n?', Comment.Single),
+ (r'--.*?$\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words(_tsql_builtins.OPERATORS), Operator),
(words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
@@ -562,15 +562,15 @@ class TransactSqlLexer(RegexLexer):
rating = 1.0
else:
name_between_backtick_count = len(
- name_between_backtick_re.findall(text))
+ name_between_backtick_re.findall(text))
name_between_bracket_count = len(
name_between_bracket_re.findall(text))
# We need to check if there are any names using
# backticks or brackets, as otherwise both are 0
# and 0 >= 2 * 0, so we would always assume it's true
dialect_name_count = name_between_backtick_count + name_between_bracket_count
- if dialect_name_count >= 1 and \
- name_between_bracket_count >= 2 * name_between_backtick_count:
+ if dialect_name_count >= 1 and \
+ name_between_bracket_count >= 2 * name_between_backtick_count:
# Found at least twice as many [name] as `name`.
rating += 0.5
elif name_between_bracket_count > name_between_backtick_count:
@@ -585,12 +585,12 @@ class TransactSqlLexer(RegexLexer):
class MySqlLexer(RegexLexer):
- """The Oracle MySQL lexer.
-
- This lexer does not attempt to maintain strict compatibility with
- MariaDB syntax or keywords. Although MySQL and MariaDB's common code
- history suggests there may be significant overlap between the two,
- compatibility between the two is not a target for this lexer.
+ """The Oracle MySQL lexer.
+
+ This lexer does not attempt to maintain strict compatibility with
+ MariaDB syntax or keywords. Although MySQL and MariaDB's common code
+ history suggests there may be significant overlap between the two,
+ compatibility between the two is not a target for this lexer.
"""
name = 'MySQL'
@@ -601,163 +601,163 @@ class MySqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Whitespace),
-
- # Comments
- (r'(?:#|--\s+).*', Comment.Single),
- (r'/\*\+', Comment.Special, 'optimizer-hints'),
- (r'/\*', Comment.Multiline, 'multiline-comment'),
-
- # Hexadecimal literals
- (r"x'([0-9a-f]{2})+'", Number.Hex), # MySQL requires paired hex characters in this form.
- (r'0x[0-9a-f]+', Number.Hex),
-
- # Binary literals
- (r"b'[01]+'", Number.Bin),
- (r'0b[01]+', Number.Bin),
-
- # Numeric literals
- (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), # Mandatory integer, optional fraction and exponent
- (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Mandatory fraction, optional integer and exponent
- (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Exponents with integer significands are still floats
- (r'[0-9]+(?=[^0-9a-z$_\u0080-\uffff])', Number.Integer), # Integers that are not in a schema object name
-
- # Date literals
- (r"\{\s*d\s*(?P<quote>['\"])\s*\d{2}(\d{2})?.?\d{2}.?\d{2}\s*(?P=quote)\s*\}",
- Literal.Date),
-
- # Time literals
- (r"\{\s*t\s*(?P<quote>['\"])\s*(?:\d+\s+)?\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?\s*(?P=quote)\s*\}",
- Literal.Date),
-
- # Timestamp literals
- (
- r"\{\s*ts\s*(?P<quote>['\"])\s*"
- r"\d{2}(?:\d{2})?.?\d{2}.?\d{2}" # Date part
- r"\s+" # Whitespace between date and time
- r"\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?" # Time part
- r"\s*(?P=quote)\s*\}",
- Literal.Date
- ),
-
- # String literals
- (r"'", String.Single, 'single-quoted-string'),
- (r'"', String.Double, 'double-quoted-string'),
-
- # Variables
- (r'@@(?:global\.|persist\.|persist_only\.|session\.)?[a-z_]+', Name.Variable),
- (r'@[a-z0-9_$.]+', Name.Variable),
- (r"@'", Name.Variable, 'single-quoted-variable'),
- (r'@"', Name.Variable, 'double-quoted-variable'),
- (r"@`", Name.Variable, 'backtick-quoted-variable'),
- (r'\?', Name.Variable), # For demonstrating prepared statements
-
- # Operators
- (r'[!%&*+/:<=>^|~-]+', Operator),
-
- # Exceptions; these words tokenize differently in different contexts.
- (r'\b(set)(?!\s*\()', Keyword),
+
+ # Comments
+ (r'(?:#|--\s+).*', Comment.Single),
+ (r'/\*\+', Comment.Special, 'optimizer-hints'),
+ (r'/\*', Comment.Multiline, 'multiline-comment'),
+
+ # Hexadecimal literals
+ (r"x'([0-9a-f]{2})+'", Number.Hex), # MySQL requires paired hex characters in this form.
+ (r'0x[0-9a-f]+', Number.Hex),
+
+ # Binary literals
+ (r"b'[01]+'", Number.Bin),
+ (r'0b[01]+', Number.Bin),
+
+ # Numeric literals
+ (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), # Mandatory integer, optional fraction and exponent
+ (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Mandatory fraction, optional integer and exponent
+ (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Exponents with integer significands are still floats
+ (r'[0-9]+(?=[^0-9a-z$_\u0080-\uffff])', Number.Integer), # Integers that are not in a schema object name
+
+ # Date literals
+ (r"\{\s*d\s*(?P<quote>['\"])\s*\d{2}(\d{2})?.?\d{2}.?\d{2}\s*(?P=quote)\s*\}",
+ Literal.Date),
+
+ # Time literals
+ (r"\{\s*t\s*(?P<quote>['\"])\s*(?:\d+\s+)?\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?\s*(?P=quote)\s*\}",
+ Literal.Date),
+
+ # Timestamp literals
+ (
+ r"\{\s*ts\s*(?P<quote>['\"])\s*"
+ r"\d{2}(?:\d{2})?.?\d{2}.?\d{2}" # Date part
+ r"\s+" # Whitespace between date and time
+ r"\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?" # Time part
+ r"\s*(?P=quote)\s*\}",
+ Literal.Date
+ ),
+
+ # String literals
+ (r"'", String.Single, 'single-quoted-string'),
+ (r'"', String.Double, 'double-quoted-string'),
+
+ # Variables
+ (r'@@(?:global\.|persist\.|persist_only\.|session\.)?[a-z_]+', Name.Variable),
+ (r'@[a-z0-9_$.]+', Name.Variable),
+ (r"@'", Name.Variable, 'single-quoted-variable'),
+ (r'@"', Name.Variable, 'double-quoted-variable'),
+ (r"@`", Name.Variable, 'backtick-quoted-variable'),
+ (r'\?', Name.Variable), # For demonstrating prepared statements
+
+ # Operators
+ (r'[!%&*+/:<=>^|~-]+', Operator),
+
+ # Exceptions; these words tokenize differently in different contexts.
+ (r'\b(set)(?!\s*\()', Keyword),
(r'\b(character)(\s+)(set)\b', bygroups(Keyword, Whitespace, Keyword)),
- # In all other known cases, "SET" is tokenized by MYSQL_DATATYPES.
-
- (words(MYSQL_CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant),
- (words(MYSQL_DATATYPES, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words(MYSQL_KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(MYSQL_FUNCTIONS, prefix=r'\b', suffix=r'\b(\s*)(\()'),
+ # In all other known cases, "SET" is tokenized by MYSQL_DATATYPES.
+
+ (words(MYSQL_CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant),
+ (words(MYSQL_DATATYPES, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(MYSQL_KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(MYSQL_FUNCTIONS, prefix=r'\b', suffix=r'\b(\s*)(\()'),
bygroups(Name.Function, Whitespace, Punctuation)),
-
- # Schema object names
- #
- # Note: Although the first regex supports unquoted all-numeric
- # identifiers, this will not be a problem in practice because
- # numeric literals have already been handled above.
- #
- ('[0-9a-z$_\u0080-\uffff]+', Name),
- (r'`', Name.Quoted, 'schema-object-name'),
-
- # Punctuation
- (r'[(),.;]', Punctuation),
+
+ # Schema object names
+ #
+ # Note: Although the first regex supports unquoted all-numeric
+ # identifiers, this will not be a problem in practice because
+ # numeric literals have already been handled above.
+ #
+ ('[0-9a-z$_\u0080-\uffff]+', Name),
+ (r'`', Name.Quoted, 'schema-object-name'),
+
+ # Punctuation
+ (r'[(),.;]', Punctuation),
],
-
- # Multiline comment substates
- # ---------------------------
-
- 'optimizer-hints': [
- (r'[^*a-z]+', Comment.Special),
- (r'\*/', Comment.Special, '#pop'),
- (words(MYSQL_OPTIMIZER_HINTS, suffix=r'\b'), Comment.Preproc),
- ('[a-z]+', Comment.Special),
- (r'\*', Comment.Special),
- ],
-
- 'multiline-comment': [
- (r'[^*]+', Comment.Multiline),
+
+ # Multiline comment substates
+ # ---------------------------
+
+ 'optimizer-hints': [
+ (r'[^*a-z]+', Comment.Special),
+ (r'\*/', Comment.Special, '#pop'),
+ (words(MYSQL_OPTIMIZER_HINTS, suffix=r'\b'), Comment.Preproc),
+ ('[a-z]+', Comment.Special),
+ (r'\*', Comment.Special),
+ ],
+
+ 'multiline-comment': [
+ (r'[^*]+', Comment.Multiline),
(r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ],
-
- # String substates
- # ----------------
-
- 'single-quoted-string': [
- (r"[^'\\]+", String.Single),
- (r"''", String.Escape),
- (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
- (r"'", String.Single, '#pop'),
- ],
-
- 'double-quoted-string': [
- (r'[^"\\]+', String.Double),
- (r'""', String.Escape),
- (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
- (r'"', String.Double, '#pop'),
- ],
-
- # Variable substates
- # ------------------
-
- 'single-quoted-variable': [
- (r"[^']+", Name.Variable),
- (r"''", Name.Variable),
- (r"'", Name.Variable, '#pop'),
- ],
-
- 'double-quoted-variable': [
- (r'[^"]+', Name.Variable),
- (r'""', Name.Variable),
- (r'"', Name.Variable, '#pop'),
- ],
-
- 'backtick-quoted-variable': [
- (r'[^`]+', Name.Variable),
- (r'``', Name.Variable),
- (r'`', Name.Variable, '#pop'),
- ],
-
- # Schema object name substates
- # ----------------------------
- #
- # "Name.Quoted" and "Name.Quoted.Escape" are non-standard but
- # formatters will style them as "Name" by default but add
- # additional styles based on the token name. This gives users
- # flexibility to add custom styles as desired.
- #
- 'schema-object-name': [
- (r'[^`]+', Name.Quoted),
- (r'``', Name.Quoted.Escape),
- (r'`', Name.Quoted, '#pop'),
- ],
+ (r'\*', Comment.Multiline),
+ ],
+
+ # String substates
+ # ----------------
+
+ 'single-quoted-string': [
+ (r"[^'\\]+", String.Single),
+ (r"''", String.Escape),
+ (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
+ (r"'", String.Single, '#pop'),
+ ],
+
+ 'double-quoted-string': [
+ (r'[^"\\]+', String.Double),
+ (r'""', String.Escape),
+ (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+
+ # Variable substates
+ # ------------------
+
+ 'single-quoted-variable': [
+ (r"[^']+", Name.Variable),
+ (r"''", Name.Variable),
+ (r"'", Name.Variable, '#pop'),
+ ],
+
+ 'double-quoted-variable': [
+ (r'[^"]+', Name.Variable),
+ (r'""', Name.Variable),
+ (r'"', Name.Variable, '#pop'),
+ ],
+
+ 'backtick-quoted-variable': [
+ (r'[^`]+', Name.Variable),
+ (r'``', Name.Variable),
+ (r'`', Name.Variable, '#pop'),
+ ],
+
+ # Schema object name substates
+ # ----------------------------
+ #
+ # "Name.Quoted" and "Name.Quoted.Escape" are non-standard but
+ # formatters will style them as "Name" by default but add
+ # additional styles based on the token name. This gives users
+ # flexibility to add custom styles as desired.
+ #
+ 'schema-object-name': [
+ (r'[^`]+', Name.Quoted),
+ (r'``', Name.Quoted.Escape),
+ (r'`', Name.Quoted, '#pop'),
+ ],
}
def analyse_text(text):
rating = 0
name_between_backtick_count = len(
- name_between_backtick_re.findall(text))
+ name_between_backtick_re.findall(text))
name_between_bracket_count = len(
name_between_bracket_re.findall(text))
# Same logic as above in the TSQL analysis
dialect_name_count = name_between_backtick_count + name_between_bracket_count
- if dialect_name_count >= 1 and \
- name_between_backtick_count >= 2 * name_between_bracket_count:
+ if dialect_name_count >= 1 and \
+ name_between_backtick_count >= 2 * name_between_bracket_count:
# Found at least twice as many `name` as [name].
rating += 0.5
elif name_between_backtick_count > name_between_bracket_count:
@@ -795,8 +795,8 @@ class SqliteConsoleLexer(Lexer):
curcode += line[8:]
else:
if curcode:
- yield from do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('SQL error: '):
@@ -804,8 +804,8 @@ class SqliteConsoleLexer(Lexer):
else:
yield (match.start(), Generic.Output, line)
if curcode:
- yield from do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode))
+ yield from do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode))
class RqlLexer(RegexLexer):
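
A minimal sketch of the MySqlLexer rules shown above (backtick-quoted names, @@ system variables and single-line comments), using the 'mysql' alias; the statement is illustrative:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

stmt = "SELECT `name`, @@session.sql_mode FROM `users` WHERE id = 1;  -- trailing comment\n"

# Backticks enter the 'schema-object-name' state, '@@session.' matches the
# system-variable rule, and '-- ' is Comment.Single per the rules above.
print(highlight(stmt, get_lexer_by_name("mysql"), TerminalFormatter()))
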
diff --git a/contrib/python/Pygments/py3/pygments/lexers/stata.py b/contrib/python/Pygments/py3/pygments/lexers/stata.py
index 4ec6cf4f75..8d96a7074d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/stata.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/stata.py
@@ -4,12 +4,12 @@
Lexer for Stata
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, default, include, words
+from pygments.lexer import RegexLexer, default, include, words
from pygments.token import Comment, Keyword, Name, Number, \
String, Text, Operator
@@ -26,8 +26,8 @@ class StataLexer(RegexLexer):
"""
# Syntax based on
# - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
- # - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
- # - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
+ # - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
+ # - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
name = 'Stata'
aliases = ['stata', 'do']
@@ -117,27 +117,27 @@ class StataLexer(RegexLexer):
# A global is more restricted, so we do follow rules. Note only
# locals explicitly enclosed ${} can be nested.
'macros': [
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
],
'macro-local': [
(r'`', Name.Variable, '#push'),
(r"'", Name.Variable, '#pop'),
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'.', Name.Variable), # fallback
],
'macro-global-nested': [
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
(r'\}', Name.Variable.Global, '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
(r'\w', Name.Variable.Global), # fallback
- default('#pop'),
+ default('#pop'),
],
'macro-global-name': [
- (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
(r'`', Name.Variable, 'macro-local', '#pop'),
(r'\w{1,32}', Name.Variable.Global, '#pop'),
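
The Stata hunk above adjusts the macro states; a minimal sketch of local/global macro highlighting through the 'stata' alias (the do-file lines are illustrative):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

do_src = "local total = 10\ndisplay `total' + ${N}\n"

# `total' goes through the 'macro-local' state and ${N} through
# 'macro-global-nested', as defined in the rules above.
print(highlight(do_src, get_lexer_by_name("stata"), TerminalFormatter()))
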
diff --git a/contrib/python/Pygments/py3/pygments/lexers/supercollider.py b/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
index 724674f5e6..4fac4a8d00 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
@@ -4,7 +4,7 @@
Lexer for SuperCollider
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class SuperColliderLexer(RegexLexer):
"""
name = 'SuperCollider'
- aliases = ['supercollider', 'sc']
+ aliases = ['supercollider', 'sc']
filenames = ['*.sc', '*.scd']
mimetypes = ['application/supercollider', 'text/supercollider', ]
@@ -83,12 +83,12 @@ class SuperColliderLexer(RegexLexer):
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
-
- def analyse_text(text):
- """We're searching for a common function and a unique keyword here."""
- if 'SinOsc' in text or 'thisFunctionDef' in text:
- return 0.1
+
+ def analyse_text(text):
+ """We're searching for a common function and a unique keyword here."""
+ if 'SinOsc' in text or 'thisFunctionDef' in text:
+ return 0.1
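
The SuperCollider hunk above keeps the 'sc' alias and a weak (0.1) analyse_text() hint; a minimal highlighting sketch (the snippet is illustrative):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

sc_src = "{ SinOsc.ar(440, 0, 0.2) }.play;\n"

# 'SinOsc' is also the string analyse_text() keys on, but an explicit alias
# lookup avoids relying on that weak 0.1 score.
print(highlight(sc_src, get_lexer_by_name("sc"), TerminalFormatter()))
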
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tcl.py b/contrib/python/Pygments/py3/pygments/lexers/tcl.py
index 7be07357aa..7fa45b3ded 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tcl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tcl.py
@@ -4,13 +4,13 @@
Lexers for Tcl and related languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Whitespace
+ Number, Whitespace
from pygments.util import shebang_matches
__all__ = ['TclLexer']
@@ -79,13 +79,13 @@ class TclLexer(RegexLexer):
(r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
],
'data': [
- (r'\s+', Whitespace),
+ (r'\s+', Whitespace),
(r'0x[a-fA-F0-9]+', Number.Hex),
(r'0[0-7]+', Number.Oct),
(r'\d+\.\d+', Number.Float),
(r'\d+', Number.Integer),
(r'\$([\w.:-]+)', Name.Variable),
- (r'([\w.,@:-]+)', Text),
+ (r'([\w.,@:-]+)', Text),
],
'params': [
(r';', Keyword, '#pop'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/teal.py b/contrib/python/Pygments/py3/pygments/lexers/teal.py
index 33088c882b..232e9c78e6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/teal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/teal.py
@@ -1,87 +1,87 @@
-"""
- pygments.lexers.teal
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for TEAL.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Comment, Name, Number, String, Text, Keyword
-
-__all__ = ['TealLexer']
-
-class TealLexer(RegexLexer):
- """
- For the `Transaction Execution Approval Language (TEAL)
- <https://developer.algorand.org/docs/reference/teal/specification/>`
-
- For more information about the grammar, see:
- https://github.com/algorand/go-algorand/blob/master/data/transactions/logic/assembler.go
-
- .. versionadded:: 2.9
- """
- name = 'teal'
- aliases = ['teal']
- filenames = ['*.teal']
-
- keywords = words({
- 'Sender', 'Fee', 'FirstValid', 'FirstValidTime', 'LastValid', 'Note',
- 'Lease', 'Receiver', 'Amount', 'CloseRemainderTo', 'VotePK',
- 'SelectionPK', 'VoteFirst', 'VoteLast', 'VoteKeyDilution', 'Type',
- 'TypeEnum', 'XferAsset', 'AssetAmount', 'AssetSender', 'AssetReceiver',
- 'AssetCloseTo', 'GroupIndex', 'TxID', 'ApplicationID', 'OnCompletion',
- 'ApplicationArgs', 'NumAppArgs', 'Accounts', 'NumAccounts',
- 'ApprovalProgram', 'ClearStateProgram', 'RekeyTo', 'ConfigAsset',
- 'ConfigAssetTotal', 'ConfigAssetDecimals', 'ConfigAssetDefaultFrozen',
- 'ConfigAssetUnitName', 'ConfigAssetName', 'ConfigAssetURL',
- 'ConfigAssetMetadataHash', 'ConfigAssetManager', 'ConfigAssetReserve',
- 'ConfigAssetFreeze', 'ConfigAssetClawback', 'FreezeAsset',
- 'FreezeAssetAccount', 'FreezeAssetFrozen',
- 'NoOp', 'OptIn', 'CloseOut', 'ClearState', 'UpdateApplication',
- 'DeleteApplication',
- 'MinTxnFee', 'MinBalance', 'MaxTxnLife', 'ZeroAddress', 'GroupSize',
- 'LogicSigVersion', 'Round', 'LatestTimestamp', 'CurrentApplicationID',
- 'AssetBalance', 'AssetFrozen',
- 'AssetTotal', 'AssetDecimals', 'AssetDefaultFrozen', 'AssetUnitName',
- 'AssetName', 'AssetURL', 'AssetMetadataHash', 'AssetManager',
- 'AssetReserve', 'AssetFreeze', 'AssetClawback',
- }, suffix = r'\b')
-
- identifier = r'[^ \t\n]+(?=\/\/)|[^ \t\n]+'
- newline = r'\r?\n'
- tokens = {
- 'root': [
- include('whitespace'),
- # pragmas match specifically on the space character
- (r'^#pragma .*' + newline, Comment.Directive),
- # labels must be followed by a space,
- # but anything after that is ignored
- ('(' + identifier + ':' + ')' + '([ \t].*)',
- bygroups(Name.Label, Comment.Single)),
- (identifier, Name.Function, 'function-args'),
- ],
- 'function-args': [
- include('whitespace'),
- (r'"', String, 'string'),
- (r'(b(?:ase)?(?:32|64) ?)(\(?[a-zA-Z0-9+/=]+\)?)',
- bygroups(String.Affix, String.Other)),
- (r'[A-Z2-7]{58}', Number), # address
- (r'0x[\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (keywords, Keyword),
- (identifier, Name.Attributes), # branch targets
- (newline, Text, '#pop'),
- ],
- 'string': [
- (r'\\(?:["nrt\\]|x\d\d)', String.Escape),
- (r'[^\\\"\n]+', String),
- (r'"', String, '#pop'),
- ],
- 'whitespace': [
- (r'[ \t]+', Text),
- (r'//[^\n]+', Comment.Single),
- ],
- }
+"""
+ pygments.lexers.teal
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for TEAL.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Comment, Name, Number, String, Text, Keyword
+
+__all__ = ['TealLexer']
+
+class TealLexer(RegexLexer):
+ """
+ For the `Transaction Execution Approval Language (TEAL)
+ <https://developer.algorand.org/docs/reference/teal/specification/>`
+
+ For more information about the grammar, see:
+ https://github.com/algorand/go-algorand/blob/master/data/transactions/logic/assembler.go
+
+ .. versionadded:: 2.9
+ """
+ name = 'teal'
+ aliases = ['teal']
+ filenames = ['*.teal']
+
+ keywords = words({
+ 'Sender', 'Fee', 'FirstValid', 'FirstValidTime', 'LastValid', 'Note',
+ 'Lease', 'Receiver', 'Amount', 'CloseRemainderTo', 'VotePK',
+ 'SelectionPK', 'VoteFirst', 'VoteLast', 'VoteKeyDilution', 'Type',
+ 'TypeEnum', 'XferAsset', 'AssetAmount', 'AssetSender', 'AssetReceiver',
+ 'AssetCloseTo', 'GroupIndex', 'TxID', 'ApplicationID', 'OnCompletion',
+ 'ApplicationArgs', 'NumAppArgs', 'Accounts', 'NumAccounts',
+ 'ApprovalProgram', 'ClearStateProgram', 'RekeyTo', 'ConfigAsset',
+ 'ConfigAssetTotal', 'ConfigAssetDecimals', 'ConfigAssetDefaultFrozen',
+ 'ConfigAssetUnitName', 'ConfigAssetName', 'ConfigAssetURL',
+ 'ConfigAssetMetadataHash', 'ConfigAssetManager', 'ConfigAssetReserve',
+ 'ConfigAssetFreeze', 'ConfigAssetClawback', 'FreezeAsset',
+ 'FreezeAssetAccount', 'FreezeAssetFrozen',
+ 'NoOp', 'OptIn', 'CloseOut', 'ClearState', 'UpdateApplication',
+ 'DeleteApplication',
+ 'MinTxnFee', 'MinBalance', 'MaxTxnLife', 'ZeroAddress', 'GroupSize',
+ 'LogicSigVersion', 'Round', 'LatestTimestamp', 'CurrentApplicationID',
+ 'AssetBalance', 'AssetFrozen',
+ 'AssetTotal', 'AssetDecimals', 'AssetDefaultFrozen', 'AssetUnitName',
+ 'AssetName', 'AssetURL', 'AssetMetadataHash', 'AssetManager',
+ 'AssetReserve', 'AssetFreeze', 'AssetClawback',
+ }, suffix = r'\b')
+
+ identifier = r'[^ \t\n]+(?=\/\/)|[^ \t\n]+'
+ newline = r'\r?\n'
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ # pragmas match specifically on the space character
+ (r'^#pragma .*' + newline, Comment.Directive),
+ # labels must be followed by a space,
+ # but anything after that is ignored
+ ('(' + identifier + ':' + ')' + '([ \t].*)',
+ bygroups(Name.Label, Comment.Single)),
+ (identifier, Name.Function, 'function-args'),
+ ],
+ 'function-args': [
+ include('whitespace'),
+ (r'"', String, 'string'),
+ (r'(b(?:ase)?(?:32|64) ?)(\(?[a-zA-Z0-9+/=]+\)?)',
+ bygroups(String.Affix, String.Other)),
+ (r'[A-Z2-7]{58}', Number), # address
+ (r'0x[\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (keywords, Keyword),
+ (identifier, Name.Attributes), # branch targets
+ (newline, Text, '#pop'),
+ ],
+ 'string': [
+ (r'\\(?:["nrt\\]|x\d\d)', String.Escape),
+ (r'[^\\\"\n]+', String),
+ (r'"', String, '#pop'),
+ ],
+ 'whitespace': [
+ (r'[ \t]+', Text),
+ (r'//[^\n]+', Comment.Single),
+ ],
+ }
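For reference, a minimal sketch of exercising the TealLexer shown in the hunk above through the public Pygments API. This assumes the lexer is registered in pygments.lexers._mapping as in upstream Pygments 2.9+, and the TEAL snippet is purely illustrative:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # 'teal' is the alias declared by TealLexer above.
    lexer = get_lexer_by_name('teal')
    sample = '#pragma version 2\nint 1\nreturn\n'
    print(highlight(sample, lexer, TerminalFormatter()))

The '#pragma ...' line is matched by the Comment.Directive rule, while 'int' and 'return' go through the 'function-args' state defined above.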
diff --git a/contrib/python/Pygments/py3/pygments/lexers/templates.py b/contrib/python/Pygments/py3/pygments/lexers/templates.py
index 548e14afe2..7c1f509deb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/templates.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/templates.py
@@ -4,7 +4,7 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -178,8 +178,8 @@ class SmartyLexer(RegexLexer):
(r'(true|false|null)\b', Keyword.Constant),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_]\w*', Name.Attribute)
]
}
@@ -225,7 +225,7 @@ class VelocityLexer(RegexLexer):
'directiveparams'),
(r'(#\{?)(' + identifier + r')(\}|\b)',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
- (r'\$!?\{?', Punctuation, 'variable')
+ (r'\$!?\{?', Punctuation, 'variable')
],
'variable': [
(identifier, Name.Variable),
@@ -248,11 +248,11 @@ class VelocityLexer(RegexLexer):
(r'\]', Operator, '#pop')
],
'funcparams': [
- (r'\$!?\{?', Punctuation, 'variable'),
+ (r'\$!?\{?', Punctuation, 'variable'),
(r'\s+', Text),
(r'[,:]', Punctuation),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"\b[0-9]+\b", Number),
(r'(true|false|null)\b', Keyword.Constant),
@@ -267,13 +267,13 @@ class VelocityLexer(RegexLexer):
def analyse_text(text):
rv = 0.0
- if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
+ if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.25
- if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
+ if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.15
- if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
+ if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.15
- if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
+ if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
r'(\.\w+(\([^)]*\))?)*\}?', text):
rv += 0.01
return rv
@@ -292,7 +292,7 @@ class VelocityHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+velocity']
def __init__(self, **options):
- super().__init__(HtmlLexer, VelocityLexer, **options)
+ super().__init__(HtmlLexer, VelocityLexer, **options)
class VelocityXmlLexer(DelegatingLexer):
@@ -308,7 +308,7 @@ class VelocityXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
- super().__init__(XmlLexer, VelocityLexer, **options)
+ super().__init__(XmlLexer, VelocityLexer, **options)
def analyse_text(text):
rv = VelocityLexer.analyse_text(text) - 0.01
@@ -320,7 +320,7 @@ class VelocityXmlLexer(DelegatingLexer):
class DjangoLexer(RegexLexer):
"""
Generic `django <http://www.djangoproject.com/documentation/templates/>`_
- and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
+ and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
It just highlights django/jinja code between the preprocessor directives,
other data is left untouched by the lexer.
@@ -337,7 +337,7 @@ class DjangoLexer(RegexLexer):
(r'[^{]+', Other),
(r'\{\{', Comment.Preproc, 'var'),
# jinja/django comments
- (r'\{#.*?#\}', Comment),
+ (r'\{#.*?#\}', Comment),
# django comments
(r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
@@ -370,8 +370,8 @@ class DjangoLexer(RegexLexer):
(r'(loop|block|super|forloop)\b', Name.Builtin),
(r'[a-zA-Z_][\w-]*', Name.Variable),
(r'\.\w+', Name.Variable),
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -461,7 +461,7 @@ class MyghtyHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+myghty']
def __init__(self, **options):
- super().__init__(HtmlLexer, MyghtyLexer, **options)
+ super().__init__(HtmlLexer, MyghtyLexer, **options)
class MyghtyXmlLexer(DelegatingLexer):
@@ -477,7 +477,7 @@ class MyghtyXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+myghty']
def __init__(self, **options):
- super().__init__(XmlLexer, MyghtyLexer, **options)
+ super().__init__(XmlLexer, MyghtyLexer, **options)
class MyghtyJavascriptLexer(DelegatingLexer):
@@ -489,13 +489,13 @@ class MyghtyJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Myghty'
- aliases = ['javascript+myghty', 'js+myghty']
+ aliases = ['javascript+myghty', 'js+myghty']
mimetypes = ['application/x-javascript+myghty',
'text/x-javascript+myghty',
'text/javascript+mygthy']
def __init__(self, **options):
- super().__init__(JavascriptLexer, MyghtyLexer, **options)
+ super().__init__(JavascriptLexer, MyghtyLexer, **options)
class MyghtyCssLexer(DelegatingLexer):
@@ -511,7 +511,7 @@ class MyghtyCssLexer(DelegatingLexer):
mimetypes = ['text/css+myghty']
def __init__(self, **options):
- super().__init__(CssLexer, MyghtyLexer, **options)
+ super().__init__(CssLexer, MyghtyLexer, **options)
class MasonLexer(RegexLexer):
@@ -536,8 +536,8 @@ class MasonLexer(RegexLexer):
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
- (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
+ (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
+ bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
(r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
@@ -563,7 +563,7 @@ class MasonLexer(RegexLexer):
def analyse_text(text):
result = 0.0
- if re.search(r'</%(class|doc|init)>', text) is not None:
+ if re.search(r'</%(class|doc|init)>', text) is not None:
result = 1.0
elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
result = 0.11
@@ -588,12 +588,12 @@ class MakoLexer(RegexLexer):
tokens = {
'root': [
(r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
- bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
+ bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
(r'(\s*)(%)([^\n]*)(\n|\Z)',
- bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
+ bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
(r'(\s*)(##[^\n]*)(\n|\Z)',
- bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
- (r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
+ bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
+ (r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
(r'(<%)([\w.:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
(r'(</%)([\w.:]+)(>)',
@@ -651,7 +651,7 @@ class MakoHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+mako']
def __init__(self, **options):
- super().__init__(HtmlLexer, MakoLexer, **options)
+ super().__init__(HtmlLexer, MakoLexer, **options)
class MakoXmlLexer(DelegatingLexer):
@@ -667,7 +667,7 @@ class MakoXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+mako']
def __init__(self, **options):
- super().__init__(XmlLexer, MakoLexer, **options)
+ super().__init__(XmlLexer, MakoLexer, **options)
class MakoJavascriptLexer(DelegatingLexer):
@@ -679,13 +679,13 @@ class MakoJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Mako'
- aliases = ['javascript+mako', 'js+mako']
+ aliases = ['javascript+mako', 'js+mako']
mimetypes = ['application/x-javascript+mako',
'text/x-javascript+mako',
'text/javascript+mako']
def __init__(self, **options):
- super().__init__(JavascriptLexer, MakoLexer, **options)
+ super().__init__(JavascriptLexer, MakoLexer, **options)
class MakoCssLexer(DelegatingLexer):
@@ -701,7 +701,7 @@ class MakoCssLexer(DelegatingLexer):
mimetypes = ['text/css+mako']
def __init__(self, **options):
- super().__init__(CssLexer, MakoLexer, **options)
+ super().__init__(CssLexer, MakoLexer, **options)
# Genshi and Cheetah lexers courtesy of Matt Good.
@@ -774,7 +774,7 @@ class CheetahHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
def __init__(self, **options):
- super().__init__(HtmlLexer, CheetahLexer, **options)
+ super().__init__(HtmlLexer, CheetahLexer, **options)
class CheetahXmlLexer(DelegatingLexer):
@@ -788,7 +788,7 @@ class CheetahXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
def __init__(self, **options):
- super().__init__(XmlLexer, CheetahLexer, **options)
+ super().__init__(XmlLexer, CheetahLexer, **options)
class CheetahJavascriptLexer(DelegatingLexer):
@@ -798,8 +798,8 @@ class CheetahJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Cheetah'
- aliases = ['javascript+cheetah', 'js+cheetah',
- 'javascript+spitfire', 'js+spitfire']
+ aliases = ['javascript+cheetah', 'js+cheetah',
+ 'javascript+spitfire', 'js+spitfire']
mimetypes = ['application/x-javascript+cheetah',
'text/x-javascript+cheetah',
'text/javascript+cheetah',
@@ -808,7 +808,7 @@ class CheetahJavascriptLexer(DelegatingLexer):
'text/javascript+spitfire']
def __init__(self, **options):
- super().__init__(JavascriptLexer, CheetahLexer, **options)
+ super().__init__(JavascriptLexer, CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
@@ -922,7 +922,7 @@ class HtmlGenshiLexer(DelegatingLexer):
mimetypes = ['text/html+genshi']
def __init__(self, **options):
- super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
+ super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
def analyse_text(text):
rv = 0.0
@@ -946,7 +946,7 @@ class GenshiLexer(DelegatingLexer):
mimetypes = ['application/x-genshi', 'application/x-kid']
def __init__(self, **options):
- super().__init__(XmlLexer, GenshiMarkupLexer, **options)
+ super().__init__(XmlLexer, GenshiMarkupLexer, **options)
def analyse_text(text):
rv = 0.0
@@ -971,7 +971,7 @@ class JavascriptGenshiLexer(DelegatingLexer):
'text/javascript+genshi']
def __init__(self, **options):
- super().__init__(JavascriptLexer, GenshiTextLexer, **options)
+ super().__init__(JavascriptLexer, GenshiTextLexer, **options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
@@ -988,7 +988,7 @@ class CssGenshiLexer(DelegatingLexer):
mimetypes = ['text/css+genshi']
def __init__(self, **options):
- super().__init__(CssLexer, GenshiTextLexer, **options)
+ super().__init__(CssLexer, GenshiTextLexer, **options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
@@ -1009,7 +1009,7 @@ class RhtmlLexer(DelegatingLexer):
mimetypes = ['text/html+ruby']
def __init__(self, **options):
- super().__init__(HtmlLexer, ErbLexer, **options)
+ super().__init__(HtmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
@@ -1026,12 +1026,12 @@ class XmlErbLexer(DelegatingLexer):
"""
name = 'XML+Ruby'
- aliases = ['xml+ruby', 'xml+erb']
+ aliases = ['xml+ruby', 'xml+erb']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+ruby']
def __init__(self, **options):
- super().__init__(XmlLexer, ErbLexer, **options)
+ super().__init__(XmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
@@ -1046,12 +1046,12 @@ class CssErbLexer(DelegatingLexer):
"""
name = 'CSS+Ruby'
- aliases = ['css+ruby', 'css+erb']
+ aliases = ['css+ruby', 'css+erb']
alias_filenames = ['*.css']
mimetypes = ['text/css+ruby']
def __init__(self, **options):
- super().__init__(CssLexer, ErbLexer, **options)
+ super().__init__(CssLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
@@ -1064,14 +1064,14 @@ class JavascriptErbLexer(DelegatingLexer):
"""
name = 'JavaScript+Ruby'
- aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
+ aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+ruby',
'text/x-javascript+ruby',
'text/javascript+ruby']
def __init__(self, **options):
- super().__init__(JavascriptLexer, ErbLexer, **options)
+ super().__init__(JavascriptLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
@@ -1094,7 +1094,7 @@ class HtmlPhpLexer(DelegatingLexer):
'application/x-httpd-php4', 'application/x-httpd-php5']
def __init__(self, **options):
- super().__init__(HtmlLexer, PhpLexer, **options)
+ super().__init__(HtmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
@@ -1114,7 +1114,7 @@ class XmlPhpLexer(DelegatingLexer):
mimetypes = ['application/xml+php']
def __init__(self, **options):
- super().__init__(XmlLexer, PhpLexer, **options)
+ super().__init__(XmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
@@ -1134,7 +1134,7 @@ class CssPhpLexer(DelegatingLexer):
mimetypes = ['text/css+php']
def __init__(self, **options):
- super().__init__(CssLexer, PhpLexer, **options)
+ super().__init__(CssLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text) - 0.05
@@ -1147,14 +1147,14 @@ class JavascriptPhpLexer(DelegatingLexer):
"""
name = 'JavaScript+PHP'
- aliases = ['javascript+php', 'js+php']
+ aliases = ['javascript+php', 'js+php']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+php',
'text/x-javascript+php',
'text/javascript+php']
def __init__(self, **options):
- super().__init__(JavascriptLexer, PhpLexer, **options)
+ super().__init__(JavascriptLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text)
@@ -1174,7 +1174,7 @@ class HtmlSmartyLexer(DelegatingLexer):
mimetypes = ['text/html+smarty']
def __init__(self, **options):
- super().__init__(HtmlLexer, SmartyLexer, **options)
+ super().__init__(HtmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
@@ -1195,7 +1195,7 @@ class XmlSmartyLexer(DelegatingLexer):
mimetypes = ['application/xml+smarty']
def __init__(self, **options):
- super().__init__(XmlLexer, SmartyLexer, **options)
+ super().__init__(XmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
@@ -1216,7 +1216,7 @@ class CssSmartyLexer(DelegatingLexer):
mimetypes = ['text/css+smarty']
def __init__(self, **options):
- super().__init__(CssLexer, SmartyLexer, **options)
+ super().__init__(CssLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
@@ -1229,14 +1229,14 @@ class JavascriptSmartyLexer(DelegatingLexer):
"""
name = 'JavaScript+Smarty'
- aliases = ['javascript+smarty', 'js+smarty']
+ aliases = ['javascript+smarty', 'js+smarty']
alias_filenames = ['*.js', '*.tpl']
mimetypes = ['application/x-javascript+smarty',
'text/x-javascript+smarty',
'text/javascript+smarty']
def __init__(self, **options):
- super().__init__(JavascriptLexer, SmartyLexer, **options)
+ super().__init__(JavascriptLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
@@ -1256,7 +1256,7 @@ class HtmlDjangoLexer(DelegatingLexer):
mimetypes = ['text/html+django', 'text/html+jinja']
def __init__(self, **options):
- super().__init__(HtmlLexer, DjangoLexer, **options)
+ super().__init__(HtmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
@@ -1277,7 +1277,7 @@ class XmlDjangoLexer(DelegatingLexer):
mimetypes = ['application/xml+django', 'application/xml+jinja']
def __init__(self, **options):
- super().__init__(XmlLexer, DjangoLexer, **options)
+ super().__init__(XmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
@@ -1298,7 +1298,7 @@ class CssDjangoLexer(DelegatingLexer):
mimetypes = ['text/css+django', 'text/css+jinja']
def __init__(self, **options):
- super().__init__(CssLexer, DjangoLexer, **options)
+ super().__init__(CssLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
@@ -1311,8 +1311,8 @@ class JavascriptDjangoLexer(DelegatingLexer):
"""
name = 'JavaScript+Django/Jinja'
- aliases = ['javascript+django', 'js+django',
- 'javascript+jinja', 'js+jinja']
+ aliases = ['javascript+django', 'js+django',
+ 'javascript+jinja', 'js+jinja']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+django',
'application/x-javascript+jinja',
@@ -1322,7 +1322,7 @@ class JavascriptDjangoLexer(DelegatingLexer):
'text/javascript+jinja']
def __init__(self, **options):
- super().__init__(JavascriptLexer, DjangoLexer, **options)
+ super().__init__(JavascriptLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
@@ -1365,7 +1365,7 @@ class JspLexer(DelegatingLexer):
mimetypes = ['application/x-jsp']
def __init__(self, **options):
- super().__init__(XmlLexer, JspRootLexer, **options)
+ super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = JavaLexer.analyse_text(text) - 0.01
@@ -1404,7 +1404,7 @@ class EvoqueLexer(RegexLexer):
# see doc for handling first name arg: /directives/evoque/
# + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
# should be using(PythonLexer), not passed out as String
- (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
+ (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
r'(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, using(PythonLexer), Punctuation)),
@@ -1428,10 +1428,10 @@ class EvoqueLexer(RegexLexer):
],
}
- def analyse_text(text):
- """Evoque templates use $evoque, which is unique."""
- if '$evoque' in text:
- return 1
+ def analyse_text(text):
+ """Evoque templates use $evoque, which is unique."""
+ if '$evoque' in text:
+ return 1
class EvoqueHtmlLexer(DelegatingLexer):
"""
@@ -1446,12 +1446,12 @@ class EvoqueHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+evoque']
def __init__(self, **options):
- super().__init__(HtmlLexer, EvoqueLexer, **options)
-
- def analyse_text(text):
- return EvoqueLexer.analyse_text(text)
+ super().__init__(HtmlLexer, EvoqueLexer, **options)
+ def analyse_text(text):
+ return EvoqueLexer.analyse_text(text)
+
class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
@@ -1465,12 +1465,12 @@ class EvoqueXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+evoque']
def __init__(self, **options):
- super().__init__(XmlLexer, EvoqueLexer, **options)
-
- def analyse_text(text):
- return EvoqueLexer.analyse_text(text)
+ super().__init__(XmlLexer, EvoqueLexer, **options)
+ def analyse_text(text):
+ return EvoqueLexer.analyse_text(text)
+
class ColdfusionLexer(RegexLexer):
"""
Coldfusion statements
@@ -1575,7 +1575,7 @@ class ColdfusionHtmlLexer(DelegatingLexer):
mimetypes = ['application/x-coldfusion']
def __init__(self, **options):
- super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
+ super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
class ColdfusionCFCLexer(DelegatingLexer):
@@ -1590,7 +1590,7 @@ class ColdfusionCFCLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
+ super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
class SspLexer(DelegatingLexer):
@@ -1605,7 +1605,7 @@ class SspLexer(DelegatingLexer):
mimetypes = ['application/x-ssp']
def __init__(self, **options):
- super().__init__(XmlLexer, JspRootLexer, **options)
+ super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = 0.0
@@ -1652,7 +1652,7 @@ class TeaTemplateLexer(DelegatingLexer):
mimetypes = ['text/x-tea']
def __init__(self, **options):
- super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
+ super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
def analyse_text(text):
rv = TeaLangLexer.analyse_text(text) - 0.01
@@ -1682,7 +1682,7 @@ class LassoHtmlLexer(DelegatingLexer):
'application/x-httpd-lasso[89]']
def __init__(self, **options):
- super().__init__(HtmlLexer, LassoLexer, **options)
+ super().__init__(HtmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
@@ -1706,7 +1706,7 @@ class LassoXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+lasso']
def __init__(self, **options):
- super().__init__(XmlLexer, LassoLexer, **options)
+ super().__init__(XmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
@@ -1730,11 +1730,11 @@ class LassoCssLexer(DelegatingLexer):
def __init__(self, **options):
options['requiredelimiters'] = True
- super().__init__(CssLexer, LassoLexer, **options)
+ super().__init__(CssLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
- if re.search(r'\w+:[^;]+;', text):
+ if re.search(r'\w+:[^;]+;', text):
rv += 0.1
if 'padding:' in text:
rv += 0.1
@@ -1750,7 +1750,7 @@ class LassoJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Lasso'
- aliases = ['javascript+lasso', 'js+lasso']
+ aliases = ['javascript+lasso', 'js+lasso']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+lasso',
'text/x-javascript+lasso',
@@ -1758,7 +1758,7 @@ class LassoJavascriptLexer(DelegatingLexer):
def __init__(self, **options):
options['requiredelimiters'] = True
- super().__init__(JavascriptLexer, LassoLexer, **options)
+ super().__init__(JavascriptLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
@@ -1782,27 +1782,27 @@ class HandlebarsLexer(RegexLexer):
'root': [
(r'[^{]+', Other),
- # Comment start {{! }} or {{!--
+ # Comment start {{! }} or {{!--
(r'\{\{!.*\}\}', Comment),
- # HTML Escaping open {{{expression
+ # HTML Escaping open {{{expression
(r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
-
- # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
- (r'(\{\{)([#~/]+)([^\s}]*)',
- bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
+
+ # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
+ (r'(\{\{)([#~/]+)([^\s}]*)',
+ bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
(r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
],
'tag': [
(r'\s+', Text),
- # HTML Escaping close }}}
+ # HTML Escaping close }}}
(r'\}\}\}', Comment.Special, '#pop'),
- # blockClose}}, includes optional tilde ~
- (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
+ # blockClose}}, includes optional tilde ~
+ (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
# {{opt=something}}
- (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
+ (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
# Partials {{> ...}}
(r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
@@ -1825,7 +1825,7 @@ class HandlebarsLexer(RegexLexer):
include('generic'),
],
'variable': [
- (r'[()/@a-zA-Z][\w-]*', Name.Variable),
+ (r'[()/@a-zA-Z][\w-]*', Name.Variable),
(r'\.[\w-]+', Name.Variable),
(r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
],
@@ -1833,8 +1833,8 @@ class HandlebarsLexer(RegexLexer):
include('variable'),
# borrowed from DjangoLexer
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
]
@@ -1855,7 +1855,7 @@ class HandlebarsHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
def __init__(self, **options):
- super().__init__(HtmlLexer, HandlebarsLexer, **options)
+ super().__init__(HtmlLexer, HandlebarsLexer, **options)
class YamlJinjaLexer(DelegatingLexer):
@@ -1874,7 +1874,7 @@ class YamlJinjaLexer(DelegatingLexer):
mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
def __init__(self, **options):
- super().__init__(YamlLexer, DjangoLexer, **options)
+ super().__init__(YamlLexer, DjangoLexer, **options)
class LiquidLexer(RegexLexer):
@@ -2146,8 +2146,8 @@ class TwigLexer(RegexLexer):
(_ident_inner, Name.Variable),
(r'\.' + _ident_inner, Name.Variable),
(r'\.[0-9]+', Number),
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -2180,7 +2180,7 @@ class TwigHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+twig']
def __init__(self, **options):
- super().__init__(HtmlLexer, TwigLexer, **options)
+ super().__init__(HtmlLexer, TwigLexer, **options)
class Angular2Lexer(RegexLexer):
@@ -2215,9 +2215,9 @@ class Angular2Lexer(RegexLexer):
# *ngIf="..."; #f="ngForm"
(r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
- bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
+ bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
(r'([*#])([\w:.-]+)(\s*)',
- bygroups(Punctuation, Name.Attribute, Text)),
+ bygroups(Punctuation, Name.Attribute, Text)),
],
'ngExpression': [
@@ -2226,8 +2226,8 @@ class Angular2Lexer(RegexLexer):
# Literals
(r':?(true|false)', String.Boolean),
- (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -2260,4 +2260,4 @@ class Angular2HtmlLexer(DelegatingLexer):
filenames = ['*.ng2']
def __init__(self, **options):
- super().__init__(HtmlLexer, Angular2Lexer, **options)
+ super().__init__(HtmlLexer, Angular2Lexer, **options)
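Nearly every two-line hunk in this file is the same DelegatingLexer idiom: the embedded-language lexer runs first and yields Other for everything it does not understand, and the markup lexer then re-tokenizes those Other spans. A minimal sketch using one of the classes touched above; the Jinja sample string is illustrative:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.templates import HtmlDjangoLexer

    # DjangoLexer tokenizes the {{ ... }} / {% ... %} parts and emits Other
    # for the rest; HtmlLexer then lexes those Other spans, matching the
    # super().__init__(HtmlLexer, DjangoLexer, **options) call above.
    sample = '<p>Hello, {{ user.name }}!</p>'
    print(highlight(sample, HtmlDjangoLexer(), TerminalFormatter()))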
diff --git a/contrib/python/Pygments/py3/pygments/lexers/teraterm.py b/contrib/python/Pygments/py3/pygments/lexers/teraterm.py
index feb552d314..96b3cfccb8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/teraterm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/teraterm.py
@@ -4,7 +4,7 @@
Lexer for Tera Term macro files.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,7 +24,7 @@ class TeraTermLexer(RegexLexer):
.. versionadded:: 2.4
"""
name = 'Tera Term macro'
- aliases = ['teratermmacro', 'teraterm', 'ttl']
+ aliases = ['teratermmacro', 'teraterm', 'ttl']
filenames = ['*.ttl']
mimetypes = ['text/x-teratermmacro']
@@ -39,7 +39,7 @@ class TeraTermLexer(RegexLexer):
include('numeric-literals'),
include('string-literals'),
include('all-whitespace'),
- (r'\S', Text),
+ (r'\S', Text),
],
'comments': [
(r';[^\r\n]*', Comment.Single),
@@ -51,250 +51,250 @@ class TeraTermLexer(RegexLexer):
(r'[*/]', Comment.Multiline)
],
'labels': [
- (r'(?i)^(\s*)(:[a-z0-9_]+)', bygroups(Text, Name.Label)),
+ (r'(?i)^(\s*)(:[a-z0-9_]+)', bygroups(Text, Name.Label)),
],
'commands': [
(
r'(?i)\b('
- r'basename|'
- r'beep|'
- r'bplusrecv|'
- r'bplussend|'
- r'break|'
- r'bringupbox|'
- # 'call' is handled separately.
- r'callmenu|'
- r'changedir|'
- r'checksum16|'
- r'checksum16file|'
- r'checksum32|'
- r'checksum32file|'
- r'checksum8|'
- r'checksum8file|'
- r'clearscreen|'
- r'clipb2var|'
- r'closesbox|'
- r'closett|'
- r'code2str|'
- r'connect|'
- r'continue|'
- r'crc16|'
- r'crc16file|'
- r'crc32|'
- r'crc32file|'
- r'cygconnect|'
- r'delpassword|'
- r'dirname|'
- r'dirnamebox|'
- r'disconnect|'
- r'dispstr|'
- r'do|'
- r'else|'
- r'elseif|'
- r'enablekeyb|'
- r'end|'
- r'endif|'
- r'enduntil|'
- r'endwhile|'
- r'exec|'
- r'execcmnd|'
- r'exit|'
- r'expandenv|'
- r'fileclose|'
- r'fileconcat|'
- r'filecopy|'
- r'filecreate|'
- r'filedelete|'
- r'filelock|'
- r'filemarkptr|'
- r'filenamebox|'
- r'fileopen|'
- r'fileread|'
- r'filereadln|'
- r'filerename|'
- r'filesearch|'
- r'fileseek|'
- r'fileseekback|'
- r'filestat|'
- r'filestrseek|'
- r'filestrseek2|'
- r'filetruncate|'
- r'fileunlock|'
- r'filewrite|'
- r'filewriteln|'
- r'findclose|'
- r'findfirst|'
- r'findnext|'
- r'flushrecv|'
- r'foldercreate|'
- r'folderdelete|'
- r'foldersearch|'
- r'for|'
- r'getdate|'
- r'getdir|'
- r'getenv|'
- r'getfileattr|'
- r'gethostname|'
- r'getipv4addr|'
- r'getipv6addr|'
- r'getmodemstatus|'
- r'getpassword|'
- r'getspecialfolder|'
- r'gettime|'
- r'gettitle|'
- r'getttdir|'
- r'getver|'
- # 'goto' is handled separately.
- r'if|'
- r'ifdefined|'
- r'include|'
- r'inputbox|'
- r'int2str|'
- r'intdim|'
- r'ispassword|'
- r'kmtfinish|'
- r'kmtget|'
- r'kmtrecv|'
- r'kmtsend|'
- r'listbox|'
- r'loadkeymap|'
- r'logautoclosemode|'
- r'logclose|'
- r'loginfo|'
- r'logopen|'
- r'logpause|'
- r'logrotate|'
- r'logstart|'
- r'logwrite|'
- r'loop|'
- r'makepath|'
- r'messagebox|'
- r'mpause|'
- r'next|'
- r'passwordbox|'
- r'pause|'
- r'quickvanrecv|'
- r'quickvansend|'
- r'random|'
- r'recvln|'
- r'regexoption|'
- r'restoresetup|'
- r'return|'
- r'rotateleft|'
- r'rotateright|'
- r'scprecv|'
- r'scpsend|'
- r'send|'
- r'sendbreak|'
- r'sendbroadcast|'
- r'sendfile|'
- r'sendkcode|'
- r'sendln|'
- r'sendlnbroadcast|'
- r'sendlnmulticast|'
- r'sendmulticast|'
- r'setbaud|'
- r'setdate|'
- r'setdebug|'
- r'setdir|'
- r'setdlgpos|'
- r'setdtr|'
- r'setecho|'
- r'setenv|'
- r'setexitcode|'
- r'setfileattr|'
- r'setflowctrl|'
- r'setmulticastname|'
- r'setpassword|'
- r'setrts|'
- r'setspeed|'
- r'setsync|'
- r'settime|'
- r'settitle|'
- r'show|'
- r'showtt|'
- r'sprintf|'
- r'sprintf2|'
- r'statusbox|'
- r'str2code|'
- r'str2int|'
- r'strcompare|'
- r'strconcat|'
- r'strcopy|'
- r'strdim|'
- r'strinsert|'
- r'strjoin|'
- r'strlen|'
- r'strmatch|'
- r'strremove|'
- r'strreplace|'
- r'strscan|'
- r'strspecial|'
- r'strsplit|'
- r'strtrim|'
- r'testlink|'
- r'then|'
- r'tolower|'
- r'toupper|'
- r'unlink|'
- r'until|'
- r'uptime|'
- r'var2clipb|'
- r'wait|'
- r'wait4all|'
- r'waitevent|'
- r'waitln|'
- r'waitn|'
- r'waitrecv|'
- r'waitregex|'
- r'while|'
- r'xmodemrecv|'
- r'xmodemsend|'
- r'yesnobox|'
- r'ymodemrecv|'
- r'ymodemsend|'
- r'zmodemrecv|'
- r'zmodemsend'
+ r'basename|'
+ r'beep|'
+ r'bplusrecv|'
+ r'bplussend|'
+ r'break|'
+ r'bringupbox|'
+ # 'call' is handled separately.
+ r'callmenu|'
+ r'changedir|'
+ r'checksum16|'
+ r'checksum16file|'
+ r'checksum32|'
+ r'checksum32file|'
+ r'checksum8|'
+ r'checksum8file|'
+ r'clearscreen|'
+ r'clipb2var|'
+ r'closesbox|'
+ r'closett|'
+ r'code2str|'
+ r'connect|'
+ r'continue|'
+ r'crc16|'
+ r'crc16file|'
+ r'crc32|'
+ r'crc32file|'
+ r'cygconnect|'
+ r'delpassword|'
+ r'dirname|'
+ r'dirnamebox|'
+ r'disconnect|'
+ r'dispstr|'
+ r'do|'
+ r'else|'
+ r'elseif|'
+ r'enablekeyb|'
+ r'end|'
+ r'endif|'
+ r'enduntil|'
+ r'endwhile|'
+ r'exec|'
+ r'execcmnd|'
+ r'exit|'
+ r'expandenv|'
+ r'fileclose|'
+ r'fileconcat|'
+ r'filecopy|'
+ r'filecreate|'
+ r'filedelete|'
+ r'filelock|'
+ r'filemarkptr|'
+ r'filenamebox|'
+ r'fileopen|'
+ r'fileread|'
+ r'filereadln|'
+ r'filerename|'
+ r'filesearch|'
+ r'fileseek|'
+ r'fileseekback|'
+ r'filestat|'
+ r'filestrseek|'
+ r'filestrseek2|'
+ r'filetruncate|'
+ r'fileunlock|'
+ r'filewrite|'
+ r'filewriteln|'
+ r'findclose|'
+ r'findfirst|'
+ r'findnext|'
+ r'flushrecv|'
+ r'foldercreate|'
+ r'folderdelete|'
+ r'foldersearch|'
+ r'for|'
+ r'getdate|'
+ r'getdir|'
+ r'getenv|'
+ r'getfileattr|'
+ r'gethostname|'
+ r'getipv4addr|'
+ r'getipv6addr|'
+ r'getmodemstatus|'
+ r'getpassword|'
+ r'getspecialfolder|'
+ r'gettime|'
+ r'gettitle|'
+ r'getttdir|'
+ r'getver|'
+ # 'goto' is handled separately.
+ r'if|'
+ r'ifdefined|'
+ r'include|'
+ r'inputbox|'
+ r'int2str|'
+ r'intdim|'
+ r'ispassword|'
+ r'kmtfinish|'
+ r'kmtget|'
+ r'kmtrecv|'
+ r'kmtsend|'
+ r'listbox|'
+ r'loadkeymap|'
+ r'logautoclosemode|'
+ r'logclose|'
+ r'loginfo|'
+ r'logopen|'
+ r'logpause|'
+ r'logrotate|'
+ r'logstart|'
+ r'logwrite|'
+ r'loop|'
+ r'makepath|'
+ r'messagebox|'
+ r'mpause|'
+ r'next|'
+ r'passwordbox|'
+ r'pause|'
+ r'quickvanrecv|'
+ r'quickvansend|'
+ r'random|'
+ r'recvln|'
+ r'regexoption|'
+ r'restoresetup|'
+ r'return|'
+ r'rotateleft|'
+ r'rotateright|'
+ r'scprecv|'
+ r'scpsend|'
+ r'send|'
+ r'sendbreak|'
+ r'sendbroadcast|'
+ r'sendfile|'
+ r'sendkcode|'
+ r'sendln|'
+ r'sendlnbroadcast|'
+ r'sendlnmulticast|'
+ r'sendmulticast|'
+ r'setbaud|'
+ r'setdate|'
+ r'setdebug|'
+ r'setdir|'
+ r'setdlgpos|'
+ r'setdtr|'
+ r'setecho|'
+ r'setenv|'
+ r'setexitcode|'
+ r'setfileattr|'
+ r'setflowctrl|'
+ r'setmulticastname|'
+ r'setpassword|'
+ r'setrts|'
+ r'setspeed|'
+ r'setsync|'
+ r'settime|'
+ r'settitle|'
+ r'show|'
+ r'showtt|'
+ r'sprintf|'
+ r'sprintf2|'
+ r'statusbox|'
+ r'str2code|'
+ r'str2int|'
+ r'strcompare|'
+ r'strconcat|'
+ r'strcopy|'
+ r'strdim|'
+ r'strinsert|'
+ r'strjoin|'
+ r'strlen|'
+ r'strmatch|'
+ r'strremove|'
+ r'strreplace|'
+ r'strscan|'
+ r'strspecial|'
+ r'strsplit|'
+ r'strtrim|'
+ r'testlink|'
+ r'then|'
+ r'tolower|'
+ r'toupper|'
+ r'unlink|'
+ r'until|'
+ r'uptime|'
+ r'var2clipb|'
+ r'wait|'
+ r'wait4all|'
+ r'waitevent|'
+ r'waitln|'
+ r'waitn|'
+ r'waitrecv|'
+ r'waitregex|'
+ r'while|'
+ r'xmodemrecv|'
+ r'xmodemsend|'
+ r'yesnobox|'
+ r'ymodemrecv|'
+ r'ymodemsend|'
+ r'zmodemrecv|'
+ r'zmodemsend'
r')\b',
Keyword,
),
- (r'(?i)(call|goto)([ \t]+)([a-z0-9_]+)',
- bygroups(Keyword, Text, Name.Label)),
+ (r'(?i)(call|goto)([ \t]+)([a-z0-9_]+)',
+ bygroups(Keyword, Text, Name.Label)),
],
'builtin-variables': [
(
r'(?i)('
- r'groupmatchstr1|'
- r'groupmatchstr2|'
- r'groupmatchstr3|'
- r'groupmatchstr4|'
- r'groupmatchstr5|'
- r'groupmatchstr6|'
- r'groupmatchstr7|'
- r'groupmatchstr8|'
- r'groupmatchstr9|'
- r'inputstr|'
- r'matchstr|'
- r'mtimeout|'
- r'param1|'
- r'param2|'
- r'param3|'
- r'param4|'
- r'param5|'
- r'param6|'
- r'param7|'
- r'param8|'
- r'param9|'
- r'paramcnt|'
- r'params|'
- r'result|'
- r'timeout'
+ r'groupmatchstr1|'
+ r'groupmatchstr2|'
+ r'groupmatchstr3|'
+ r'groupmatchstr4|'
+ r'groupmatchstr5|'
+ r'groupmatchstr6|'
+ r'groupmatchstr7|'
+ r'groupmatchstr8|'
+ r'groupmatchstr9|'
+ r'inputstr|'
+ r'matchstr|'
+ r'mtimeout|'
+ r'param1|'
+ r'param2|'
+ r'param3|'
+ r'param4|'
+ r'param5|'
+ r'param6|'
+ r'param7|'
+ r'param8|'
+ r'param9|'
+ r'paramcnt|'
+ r'params|'
+ r'result|'
+ r'timeout'
r')\b',
Name.Builtin
),
],
'user-variables': [
- (r'(?i)[a-z_][a-z0-9_]*', Name.Variable),
+ (r'(?i)[a-z_][a-z0-9_]*', Name.Variable),
],
'numeric-literals': [
(r'(-?)([0-9]+)', bygroups(Operator, Number.Integer)),
@@ -306,7 +306,7 @@ class TeraTermLexer(RegexLexer):
(r'"', String.Double, 'in-double-string'),
],
'in-general-string': [
- (r'\\[\\nt]', String.Escape), # Only three escapes are supported.
+ (r'\\[\\nt]', String.Escape), # Only three escapes are supported.
(r'.', String),
],
'in-single-string': [
@@ -323,7 +323,7 @@ class TeraTermLexer(RegexLexer):
(r'[()]', String.Symbol),
],
'all-whitespace': [
- (r'\s+', Text),
+ (r'\s+', Text),
],
}
@@ -331,4 +331,4 @@ class TeraTermLexer(RegexLexer):
# but each has a recognizable and distinct syntax.
def analyse_text(text):
if re.search(TeraTermLexer.tokens['commands'][0][0], text):
- return 0.01
+ return 0.01
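The trailing analyse_text hooks in this file and in supercollider.py above feed content-based lexer guessing: each hook returns a small score and pygments.lexers.guess_lexer picks the best-scoring lexer. A minimal sketch; the macro text is illustrative, and which lexer ultimately wins depends on every registered lexer's score:

    from pygments.lexers import guess_lexer
    from pygments.lexers.teraterm import TeraTermLexer

    macro = "connect 'example.org /ssh'\nsendln 'uptime'\nclosett\n"
    # The command-keyword regex above matches, so analyse_text() returns 0.01.
    print(TeraTermLexer.analyse_text(macro))
    # guess_lexer() compares that score against all other lexers' scores.
    print(guess_lexer(macro).name)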
diff --git a/contrib/python/Pygments/py3/pygments/lexers/testing.py b/contrib/python/Pygments/py3/pygments/lexers/testing.py
index e52f572e88..82b9c8b680 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/testing.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/testing.py
@@ -4,7 +4,7 @@
Lexers for testing languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,19 +16,19 @@ __all__ = ['GherkinLexer', 'TAPLexer']
class GherkinLexer(RegexLexer):
"""
- For `Gherkin <https://github.com/aslakhellesoy/gherkin/>` syntax.
+ For `Gherkin <https://github.com/aslakhellesoy/gherkin/>` syntax.
.. versionadded:: 1.2
"""
name = 'Gherkin'
- aliases = ['gherkin', 'cucumber']
+ aliases = ['gherkin', 'cucumber']
filenames = ['*.feature']
mimetypes = ['text/x-gherkin']
- feature_keywords = '^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
- feature_element_keywords = '^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
- examples_keywords = '^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
- step_keywords = '^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )'
+ feature_keywords = '^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
+ feature_element_keywords = '^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+ examples_keywords = '^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
+ step_keywords = '^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )'
tokens = {
'comments': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/text.py b/contrib/python/Pygments/py3/pygments/lexers/text.py
index 68e06594f7..154df79aaa 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/text.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/text.py
@@ -4,7 +4,7 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/textedit.py b/contrib/python/Pygments/py3/pygments/lexers/textedit.py
index 0e567bca1d..693b83ed92 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/textedit.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/textedit.py
@@ -4,7 +4,7 @@
Lexers for languages related to text processing.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -67,8 +67,8 @@ class AwkLexer(RegexLexer):
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
@@ -135,9 +135,9 @@ class VimLexer(RegexLexer):
(r'[ \t]+', Text),
# TODO: regexes can have other delims
- (r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
- (r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
- (r"'[^\n']*(?:''[^\n']*)*'", String.Single),
+ (r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
+ (r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
+ (r"'[^\n']*(?:''[^\n']*)*'", String.Single),
# Who decided that doublequote was a good comment character??
(r'(?<=\s)"[^\-:.%#=*].*', Comment),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/textfmts.py b/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
index 62d300a5b5..6accc67763 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
@@ -4,20 +4,20 @@
Lexers for various text formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexers import guess_lexer, get_lexer_by_name
-from pygments.lexer import RegexLexer, bygroups, default, include
+from pygments.lexers import guess_lexer, get_lexer_by_name
+from pygments.lexer import RegexLexer, bygroups, default, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Generic, Literal, Punctuation
+ Number, Generic, Literal, Punctuation
from pygments.util import ClassNotFound
-__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
- 'NotmuchLexer', 'KernelLogLexer']
+__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
+ 'NotmuchLexer', 'KernelLogLexer']
class IrcLogsLexer(RegexLexer):
@@ -174,13 +174,13 @@ class HttpLexer(RegexLexer):
tokens = {
'root': [
(r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
- r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)',
+ r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
- (r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
- bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text,
- Name.Exception, Text),
+ (r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
+ bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text,
+ Name.Exception, Text),
'headers'),
],
'headers': [
@@ -296,134 +296,134 @@ class TodotxtLexer(RegexLexer):
(r'\s+', IncompleteTaskText),
],
}
-
-
-class NotmuchLexer(RegexLexer):
- """
- For `Notmuch <https://notmuchmail.org/>`_ email text format.
-
- .. versionadded:: 2.5
-
- Additional options accepted:
-
- `body_lexer`
- If given, highlight the contents of the message body with the specified
- lexer, else guess it according to the body content (default: ``None``).
- """
-
- name = 'Notmuch'
- aliases = ['notmuch']
-
- def _highlight_code(self, match):
- code = match.group(1)
-
- try:
- if self.body_lexer:
- lexer = get_lexer_by_name(self.body_lexer)
- else:
- lexer = guess_lexer(code.strip())
- except ClassNotFound:
- lexer = get_lexer_by_name('text')
-
- yield from lexer.get_tokens_unprocessed(code)
-
- tokens = {
- 'root': [
- (r'\fmessage\{\s*', Keyword, ('message', 'message-attr')),
- ],
- 'message-attr': [
- (r'(\s*id:\s*)(\S+)', bygroups(Name.Attribute, String)),
- (r'(\s*(?:depth|match|excluded):\s*)(\d+)',
- bygroups(Name.Attribute, Number.Integer)),
- (r'(\s*filename:\s*)(.+\n)',
- bygroups(Name.Attribute, String)),
- default('#pop'),
- ],
- 'message': [
- (r'\fmessage\}\n', Keyword, '#pop'),
- (r'\fheader\{\n', Keyword, 'header'),
- (r'\fbody\{\n', Keyword, 'body'),
- ],
- 'header': [
- (r'\fheader\}\n', Keyword, '#pop'),
- (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
- bygroups(Name.Attribute, String)),
- (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
- bygroups(Generic.Strong, Literal, Name.Tag)),
- ],
- 'body': [
- (r'\fpart\{\n', Keyword, 'part'),
- (r'\f(part|attachment)\{\s*', Keyword, ('part', 'part-attr')),
- (r'\fbody\}\n', Keyword, '#pop'),
- ],
- 'part-attr': [
- (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
- (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)',
- bygroups(Punctuation, Name.Attribute, String)),
- (r'(,\s*)(Content-type:\s*)(.+\n)',
- bygroups(Punctuation, Name.Attribute, String)),
- default('#pop'),
- ],
- 'part': [
- (r'\f(?:part|attachment)\}\n', Keyword, '#pop'),
- (r'\f(?:part|attachment)\{\s*', Keyword, ('#push', 'part-attr')),
- (r'^Non-text part: .*\n', Comment),
- (r'(?s)(.*?(?=\f(?:part|attachment)\}\n))', _highlight_code),
- ],
- }
-
- def analyse_text(text):
- return 1.0 if text.startswith('\fmessage{') else 0.0
-
- def __init__(self, **options):
- self.body_lexer = options.get('body_lexer', None)
- RegexLexer.__init__(self, **options)
-
-
-class KernelLogLexer(RegexLexer):
- """
- For Linux Kernel log ("dmesg") output.
-
- .. versionadded:: 2.6
- """
- name = 'Kernel log'
- aliases = ['kmsg', 'dmesg']
- filenames = ['*.kmsg', '*.dmesg']
-
- tokens = {
- 'root': [
- (r'^[^:]+:debug : (?=\[)', Text, 'debug'),
- (r'^[^:]+:info : (?=\[)', Text, 'info'),
- (r'^[^:]+:warn : (?=\[)', Text, 'warn'),
- (r'^[^:]+:notice: (?=\[)', Text, 'warn'),
- (r'^[^:]+:err : (?=\[)', Text, 'error'),
- (r'^[^:]+:crit : (?=\[)', Text, 'error'),
- (r'^(?=\[)', Text, 'unknown'),
- ],
- 'unknown': [
- (r'^(?=.+(warning|notice|audit|deprecated))', Text, 'warn'),
- (r'^(?=.+(error|critical|fail|Bug))', Text, 'error'),
- default('info'),
- ],
- 'base': [
- (r'\[[0-9. ]+\] ', Number),
- (r'(?<=\] ).+?:', Keyword),
- (r'\n', Text, '#pop'),
- ],
- 'debug': [
- include('base'),
- (r'.+\n', Comment, '#pop')
- ],
- 'info': [
- include('base'),
- (r'.+\n', Text, '#pop')
- ],
- 'warn': [
- include('base'),
- (r'.+\n', Generic.Strong, '#pop')
- ],
- 'error': [
- include('base'),
- (r'.+\n', Generic.Error, '#pop')
- ]
- }
+
+
+class NotmuchLexer(RegexLexer):
+ """
+ For `Notmuch <https://notmuchmail.org/>`_ email text format.
+
+ .. versionadded:: 2.5
+
+ Additional options accepted:
+
+ `body_lexer`
+ If given, highlight the contents of the message body with the specified
+ lexer, else guess it according to the body content (default: ``None``).
+ """
+
+ name = 'Notmuch'
+ aliases = ['notmuch']
+
+ def _highlight_code(self, match):
+ code = match.group(1)
+
+ try:
+ if self.body_lexer:
+ lexer = get_lexer_by_name(self.body_lexer)
+ else:
+ lexer = guess_lexer(code.strip())
+ except ClassNotFound:
+ lexer = get_lexer_by_name('text')
+
+ yield from lexer.get_tokens_unprocessed(code)
+
+ tokens = {
+ 'root': [
+ (r'\fmessage\{\s*', Keyword, ('message', 'message-attr')),
+ ],
+ 'message-attr': [
+ (r'(\s*id:\s*)(\S+)', bygroups(Name.Attribute, String)),
+ (r'(\s*(?:depth|match|excluded):\s*)(\d+)',
+ bygroups(Name.Attribute, Number.Integer)),
+ (r'(\s*filename:\s*)(.+\n)',
+ bygroups(Name.Attribute, String)),
+ default('#pop'),
+ ],
+ 'message': [
+ (r'\fmessage\}\n', Keyword, '#pop'),
+ (r'\fheader\{\n', Keyword, 'header'),
+ (r'\fbody\{\n', Keyword, 'body'),
+ ],
+ 'header': [
+ (r'\fheader\}\n', Keyword, '#pop'),
+ (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
+ bygroups(Name.Attribute, String)),
+ (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
+ bygroups(Generic.Strong, Literal, Name.Tag)),
+ ],
+ 'body': [
+ (r'\fpart\{\n', Keyword, 'part'),
+ (r'\f(part|attachment)\{\s*', Keyword, ('part', 'part-attr')),
+ (r'\fbody\}\n', Keyword, '#pop'),
+ ],
+ 'part-attr': [
+ (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
+ (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)',
+ bygroups(Punctuation, Name.Attribute, String)),
+ (r'(,\s*)(Content-type:\s*)(.+\n)',
+ bygroups(Punctuation, Name.Attribute, String)),
+ default('#pop'),
+ ],
+ 'part': [
+ (r'\f(?:part|attachment)\}\n', Keyword, '#pop'),
+ (r'\f(?:part|attachment)\{\s*', Keyword, ('#push', 'part-attr')),
+ (r'^Non-text part: .*\n', Comment),
+ (r'(?s)(.*?(?=\f(?:part|attachment)\}\n))', _highlight_code),
+ ],
+ }
+
+ def analyse_text(text):
+ return 1.0 if text.startswith('\fmessage{') else 0.0
+
+ def __init__(self, **options):
+ self.body_lexer = options.get('body_lexer', None)
+ RegexLexer.__init__(self, **options)
+
+
+class KernelLogLexer(RegexLexer):
+ """
+ For Linux Kernel log ("dmesg") output.
+
+ .. versionadded:: 2.6
+ """
+ name = 'Kernel log'
+ aliases = ['kmsg', 'dmesg']
+ filenames = ['*.kmsg', '*.dmesg']
+
+ tokens = {
+ 'root': [
+ (r'^[^:]+:debug : (?=\[)', Text, 'debug'),
+ (r'^[^:]+:info : (?=\[)', Text, 'info'),
+ (r'^[^:]+:warn : (?=\[)', Text, 'warn'),
+ (r'^[^:]+:notice: (?=\[)', Text, 'warn'),
+ (r'^[^:]+:err : (?=\[)', Text, 'error'),
+ (r'^[^:]+:crit : (?=\[)', Text, 'error'),
+ (r'^(?=\[)', Text, 'unknown'),
+ ],
+ 'unknown': [
+ (r'^(?=.+(warning|notice|audit|deprecated))', Text, 'warn'),
+ (r'^(?=.+(error|critical|fail|Bug))', Text, 'error'),
+ default('info'),
+ ],
+ 'base': [
+ (r'\[[0-9. ]+\] ', Number),
+ (r'(?<=\] ).+?:', Keyword),
+ (r'\n', Text, '#pop'),
+ ],
+ 'debug': [
+ include('base'),
+ (r'.+\n', Comment, '#pop')
+ ],
+ 'info': [
+ include('base'),
+ (r'.+\n', Text, '#pop')
+ ],
+ 'warn': [
+ include('base'),
+ (r'.+\n', Generic.Strong, '#pop')
+ ],
+ 'error': [
+ include('base'),
+ (r'.+\n', Generic.Error, '#pop')
+ ]
+ }
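
The KernelLogLexer re-added in the hunk above registers the aliases 'kmsg' and 'dmesg' and the '*.kmsg'/'*.dmesg' filename patterns, so it is reachable through Pygments' normal lookup helpers once the file is restored. A minimal sketch, assuming a plain Pygments installation; the log line is invented for illustration and enters the lexer through the '^(?=\[)' fallback in 'root':

from pygments.lexers import get_lexer_by_name

# Invented dmesg-style line (not taken from this diff); it is routed to the
# 'unknown' state and then classified as 'info' by the default() rule.
sample = "[    1.234567] usb 1-1: new high-speed USB device number 2\n"

lexer = get_lexer_by_name("dmesg")  # alias declared by KernelLogLexer
for token, value in lexer.get_tokens(sample):
    print(token, repr(value))

Per the 'base' and 'info' states above, the timestamp comes out as Number, the "usb 1-1:" prefix as Keyword, and the rest of the line as Text.
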
diff --git a/contrib/python/Pygments/py3/pygments/lexers/theorem.py b/contrib/python/Pygments/py3/pygments/lexers/theorem.py
index a7f4330a54..e085a0fc7e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/theorem.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/theorem.py
@@ -4,7 +4,7 @@
Lexers for theorem-proving languages.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,8 +29,8 @@ class CoqLexer(RegexLexer):
filenames = ['*.v']
mimetypes = ['text/x-coq']
- flags = re.UNICODE
-
+ flags = re.UNICODE
+
keywords1 = (
# Vernacular commands
'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
@@ -56,7 +56,7 @@ class CoqLexer(RegexLexer):
)
keywords3 = (
# Sorts
- 'Type', 'Prop', 'SProp',
+ 'Type', 'Prop', 'SProp',
)
keywords4 = (
# Tactics
@@ -96,8 +96,8 @@ class CoqLexer(RegexLexer):
'<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
r'/\\', r'\\/', r'\{\|', r'\|\}',
- # 'Π', 'Σ', # Not defined in the standard library
- 'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
+ # 'Π', 'Σ', # Not defined in the standard library
+ 'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
prefix_syms = r'[!?~]'
@@ -127,15 +127,15 @@ class CoqLexer(RegexLexer):
(r'0[bB][01][01_]*', Number.Bin),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", String.Char),
-
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", String.Char),
+
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name),
- (r'\S', Name.Builtin.Pseudo),
+ (r'\S', Name.Builtin.Pseudo),
],
'comment': [
(r'[^(*)]+', Comment),
@@ -159,8 +159,8 @@ class CoqLexer(RegexLexer):
}
def analyse_text(text):
- if 'Qed' in text and 'Proof' in text:
- return 1
+ if 'Qed' in text and 'Proof' in text:
+ return 1
class IsabelleLexer(RegexLexer):
@@ -393,72 +393,72 @@ class LeanLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'/--', String.Doc, 'docstring'),
- (r'/-', Comment, 'comment'),
- (r'--.*?$', Comment.Single),
- (words((
- 'import', 'renaming', 'hiding',
- 'namespace',
- 'local',
- 'private', 'protected', 'section',
- 'include', 'omit', 'section',
- 'protected', 'export',
- 'open',
- 'attribute',
- ), prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words((
- 'lemma', 'theorem', 'def', 'definition', 'example',
- 'axiom', 'axioms', 'constant', 'constants',
- 'universe', 'universes',
- 'inductive', 'coinductive', 'structure', 'extends',
- 'class', 'instance',
- 'abbreviation',
-
- 'noncomputable theory',
-
- 'noncomputable', 'mutual', 'meta',
-
- 'attribute',
-
- 'parameter', 'parameters',
- 'variable', 'variables',
-
- 'reserve', 'precedence',
- 'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr',
-
- 'begin', 'by', 'end',
-
- 'set_option',
- 'run_cmd',
- ), prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
- (r'@\[[^\]]*\]', Keyword.Declaration),
- (words((
- 'forall', 'fun', 'Pi', 'from', 'have', 'show', 'assume', 'suffices',
- 'let', 'if', 'else', 'then', 'in', 'with', 'calc', 'match',
- 'do'
- ), prefix=r'\b', suffix=r'\b'), Keyword),
- (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error),
- (words(('Sort', 'Prop', 'Type'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words((
- '#eval', '#check', '#reduce', '#exit',
- '#print', '#help',
- ), suffix=r'\b'), Keyword),
- (words((
- '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',',
- )), Operator),
- (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]'
- r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079'
- r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name),
- (r'0x[A-Za-z0-9]+', Number.Integer),
- (r'0b[01]+', Number.Integer),
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'/--', String.Doc, 'docstring'),
+ (r'/-', Comment, 'comment'),
+ (r'--.*?$', Comment.Single),
+ (words((
+ 'import', 'renaming', 'hiding',
+ 'namespace',
+ 'local',
+ 'private', 'protected', 'section',
+ 'include', 'omit', 'section',
+ 'protected', 'export',
+ 'open',
+ 'attribute',
+ ), prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words((
+ 'lemma', 'theorem', 'def', 'definition', 'example',
+ 'axiom', 'axioms', 'constant', 'constants',
+ 'universe', 'universes',
+ 'inductive', 'coinductive', 'structure', 'extends',
+ 'class', 'instance',
+ 'abbreviation',
+
+ 'noncomputable theory',
+
+ 'noncomputable', 'mutual', 'meta',
+
+ 'attribute',
+
+ 'parameter', 'parameters',
+ 'variable', 'variables',
+
+ 'reserve', 'precedence',
+ 'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr',
+
+ 'begin', 'by', 'end',
+
+ 'set_option',
+ 'run_cmd',
+ ), prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
+ (r'@\[[^\]]*\]', Keyword.Declaration),
+ (words((
+ 'forall', 'fun', 'Pi', 'from', 'have', 'show', 'assume', 'suffices',
+ 'let', 'if', 'else', 'then', 'in', 'with', 'calc', 'match',
+ 'do'
+ ), prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error),
+ (words(('Sort', 'Prop', 'Type'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words((
+ '#eval', '#check', '#reduce', '#exit',
+ '#print', '#help',
+ ), suffix=r'\b'), Keyword),
+ (words((
+ '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',',
+ )), Operator),
+ (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]'
+ r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079'
+ r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name),
+ (r'0x[A-Za-z0-9]+', Number.Integer),
+ (r'0b[01]+', Number.Integer),
(r'\d+', Number.Integer),
(r'"', String.Double, 'string'),
- (r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char),
- (r'[~?][a-z][\w\']*:', Name.Variable),
- (r'\S', Name.Builtin.Pseudo),
+ (r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char),
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ (r'\S', Name.Builtin.Pseudo),
],
'comment': [
(r'[^/-]', Comment.Multiline),
@@ -466,14 +466,14 @@ class LeanLexer(RegexLexer):
(r'-/', Comment.Multiline, '#pop'),
(r'[/-]', Comment.Multiline)
],
- 'docstring': [
- (r'[^/-]', String.Doc),
- (r'-/', String.Doc, '#pop'),
- (r'[/-]', String.Doc)
- ],
+ 'docstring': [
+ (r'[^/-]', String.Doc),
+ (r'-/', String.Doc, '#pop'),
+ (r'[/-]', String.Doc)
+ ],
'string': [
(r'[^\\"]+', String.Double),
- (r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape),
+ (r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape),
('"', String.Double, '#pop'),
],
}
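
The analyse_text hunk for CoqLexer above returns a full score whenever both 'Proof' and 'Qed' occur in the input; guess_lexer uses such scores when no filename hint is available. A small sketch, assuming a plain Pygments installation; the Coq snippet is a generic example, not taken from this diff:

from pygments.lexers import guess_lexer

coq_snippet = """
Lemma plus_O_n : forall n : nat, 0 + n = n.
Proof.
  intros n. simpl. reflexivity.
Qed.
"""

# Both 'Proof' and 'Qed' are present, so CoqLexer.analyse_text reports 1.0;
# guess_lexer should therefore pick the Coq lexer unless some other lexer
# happens to claim an equally high score for this text.
print(guess_lexer(coq_snippet).name)
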
diff --git a/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py b/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py
index 3b94da5744..1d30b6792b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py
@@ -1,116 +1,116 @@
-"""
- pygments.lexers.thingsdb
+"""
+ pygments.lexers.thingsdb
~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the ThingsDB language.
-
+
+ Lexers for the ThingsDB language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Comment, Keyword, Name, Number, String, Text, \
- Operator, Punctuation, Whitespace
-
-__all__ = ['ThingsDBLexer']
-
-
-class ThingsDBLexer(RegexLexer):
- """
- Lexer for the ThingsDB programming language.
-
- .. versionadded:: 2.9
- """
- name = 'ThingsDB'
- aliases = ['ti', 'thingsdb']
- filenames = ['*.ti']
-
- tokens = {
- 'root': [
- include('expression'),
- ],
- 'expression': [
- include('comments'),
- include('whitespace'),
-
- # numbers
- (r'[-+]?0b[01]+', Number.Bin),
- (r'[-+]?0o[0-8]+', Number.Oct),
- (r'([-+]?0x[0-9a-fA-F]+)', Number.Hex),
- (r'[-+]?[0-9]+', Number.Integer),
- (r'[-+]?((inf|nan)([^0-9A-Za-z_]|$)|[0-9]*\.[0-9]+(e[+-][0-9]+)?)',
- Number.Float),
-
- # strings
- (r'(?:"(?:[^"]*)")+', String.Double),
- (r"(?:'(?:[^']*)')+", String.Single),
-
- # literals
- (r'(true|false|nil)\b', Keyword.Constant),
-
- # regular expressions
- (r'(/[^/\\]*(?:\\.[^/\\]*)*/i?)', String.Regex),
-
- # thing id's
- (r'#[0-9]+', Comment.Preproc),
-
- # name, assignments and functions
- include('names'),
-
- (r'[(){}\[\],;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
- ],
- 'names': [
- (r'(\.)'
- r'(add|call|contains|del|endswith|extend|filter|find|findindex|'
- r'get|has|id|indexof|keys|len|lower|map|pop|push|remove|set|sort|'
- r'splice|startswith|test|unwrap|upper|values|wrap)'
- r'(\()',
- bygroups(Name.Function, Name.Function, Punctuation), 'arguments'),
- (r'(array|assert|assert_err|auth_err|backup_info|backups_info|'
- r'bad_data_err|bool|closure|collection_info|collections_info|'
- r'counters|deep|del_backup|del_collection|del_expired|del_node|'
- r'del_procedure|del_token|del_type|del_user|err|float|'
- r'forbidden_err|grant|int|isarray|isascii|isbool|isbytes|iserr|'
- r'isfloat|isinf|isint|islist|isnan|isnil|israw|isset|isstr|'
- r'isthing|istuple|isutf8|lookup_err|max_quota_err|mod_type|new|'
- r'new_backup|new_collection|new_node|new_procedure|new_token|'
- r'new_type|new_user|node_err|node_info|nodes_info|now|'
- r'num_arguments_err|operation_err|overflow_err|procedure_doc|'
- r'procedure_info|procedures_info|raise|refs|rename_collection|'
- r'rename_user|reset_counters|return|revoke|run|set_log_level|set|'
- r'set_quota|set_type|shutdown|str|syntax_err|thing|try|type|'
- r'type_err|type_count|type_info|types_info|user_info|users_info|'
- r'value_err|wse|zero_div_err)'
- r'(\()',
- bygroups(Name.Function, Punctuation),
- 'arguments'),
- (r'(\.[A-Za-z_][0-9A-Za-z_]*)'
- r'(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- (r'\.[A-Za-z_][0-9A-Za-z_]*', Name.Attribute),
- (r'([A-Za-z_][0-9A-Za-z_]*)(\s*)(=)',
- bygroups(Name.Variable, Text, Operator)),
- (r'[A-Za-z_][0-9A-Za-z_]*', Name.Variable),
- ],
- 'whitespace': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'arguments': [
- include('expression'),
- (',', Punctuation),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Comment, Keyword, Name, Number, String, Text, \
+ Operator, Punctuation, Whitespace
+
+__all__ = ['ThingsDBLexer']
+
+
+class ThingsDBLexer(RegexLexer):
+ """
+ Lexer for the ThingsDB programming language.
+
+ .. versionadded:: 2.9
+ """
+ name = 'ThingsDB'
+ aliases = ['ti', 'thingsdb']
+ filenames = ['*.ti']
+
+ tokens = {
+ 'root': [
+ include('expression'),
+ ],
+ 'expression': [
+ include('comments'),
+ include('whitespace'),
+
+ # numbers
+ (r'[-+]?0b[01]+', Number.Bin),
+ (r'[-+]?0o[0-8]+', Number.Oct),
+ (r'([-+]?0x[0-9a-fA-F]+)', Number.Hex),
+ (r'[-+]?[0-9]+', Number.Integer),
+ (r'[-+]?((inf|nan)([^0-9A-Za-z_]|$)|[0-9]*\.[0-9]+(e[+-][0-9]+)?)',
+ Number.Float),
+
+ # strings
+ (r'(?:"(?:[^"]*)")+', String.Double),
+ (r"(?:'(?:[^']*)')+", String.Single),
+
+ # literals
+ (r'(true|false|nil)\b', Keyword.Constant),
+
+ # regular expressions
+ (r'(/[^/\\]*(?:\\.[^/\\]*)*/i?)', String.Regex),
+
+ # thing id's
+ (r'#[0-9]+', Comment.Preproc),
+
+ # name, assignments and functions
+ include('names'),
+
+ (r'[(){}\[\],;]', Punctuation),
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
+ ],
+ 'names': [
+ (r'(\.)'
+ r'(add|call|contains|del|endswith|extend|filter|find|findindex|'
+ r'get|has|id|indexof|keys|len|lower|map|pop|push|remove|set|sort|'
+ r'splice|startswith|test|unwrap|upper|values|wrap)'
+ r'(\()',
+ bygroups(Name.Function, Name.Function, Punctuation), 'arguments'),
+ (r'(array|assert|assert_err|auth_err|backup_info|backups_info|'
+ r'bad_data_err|bool|closure|collection_info|collections_info|'
+ r'counters|deep|del_backup|del_collection|del_expired|del_node|'
+ r'del_procedure|del_token|del_type|del_user|err|float|'
+ r'forbidden_err|grant|int|isarray|isascii|isbool|isbytes|iserr|'
+ r'isfloat|isinf|isint|islist|isnan|isnil|israw|isset|isstr|'
+ r'isthing|istuple|isutf8|lookup_err|max_quota_err|mod_type|new|'
+ r'new_backup|new_collection|new_node|new_procedure|new_token|'
+ r'new_type|new_user|node_err|node_info|nodes_info|now|'
+ r'num_arguments_err|operation_err|overflow_err|procedure_doc|'
+ r'procedure_info|procedures_info|raise|refs|rename_collection|'
+ r'rename_user|reset_counters|return|revoke|run|set_log_level|set|'
+ r'set_quota|set_type|shutdown|str|syntax_err|thing|try|type|'
+ r'type_err|type_count|type_info|types_info|user_info|users_info|'
+ r'value_err|wse|zero_div_err)'
+ r'(\()',
+ bygroups(Name.Function, Punctuation),
+ 'arguments'),
+ (r'(\.[A-Za-z_][0-9A-Za-z_]*)'
+ r'(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ (r'\.[A-Za-z_][0-9A-Za-z_]*', Name.Attribute),
+ (r'([A-Za-z_][0-9A-Za-z_]*)(\s*)(=)',
+ bygroups(Name.Variable, Text, Operator)),
+ (r'[A-Za-z_][0-9A-Za-z_]*', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ ],
+ 'comments': [
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'arguments': [
+ include('expression'),
+ (',', Punctuation),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ }
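
ThingsDBLexer registers the aliases 'ti' and 'thingsdb' and the '*.ti' filename pattern, so the usual highlight pipeline applies. A minimal sketch, assuming a plain Pygments installation; the query text is invented for illustration:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

# Invented ThingsDB-style query: '.filter(' is caught by the method rule in
# 'names', and the trailing '//' comment by the 'comments' state.
query = ".users.filter(|u| u.age >= 18);  // adults only\n"

print(highlight(query, get_lexer_by_name("thingsdb"), HtmlFormatter()))
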
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tnt.py b/contrib/python/Pygments/py3/pygments/lexers/tnt.py
index e6e71961d4..6944b4ebdf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tnt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tnt.py
@@ -1,272 +1,272 @@
-"""
- pygments.lexers.tnt
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for Typographic Number Theory.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
- Punctuation, Error
-
-__all__ = ['TNTLexer']
-
-
-class TNTLexer(Lexer):
- """
- Lexer for Typographic Number Theory, as described in the book
- Gödel, Escher, Bach, by Douglas R. Hofstadter,
- or as summarized here:
- https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt
-
- .. versionadded:: 2.7
- """
-
- name = 'Typographic Number Theory'
- aliases = ['tnt']
- filenames = ['*.tnt']
-
- cur = []
-
- LOGIC = set('⊃→]&∧^|∨Vv')
- OPERATORS = set('+.⋅*')
- VARIABLES = set('abcde')
- PRIMES = set("'′")
- NEGATORS = set('~!')
- QUANTIFIERS = set('AE∀∃')
- NUMBERS = set('0123456789')
- WHITESPACE = set('\t \v\n')
-
- RULES = re.compile('''(?xi)
- joining | separation | double-tilde | fantasy\\ rule
- | carry[- ]over(?:\\ of)?(?:\\ line)?\\ ([0-9]+) | detachment
- | contrapositive | De\\ Morgan | switcheroo
- | specification | generalization | interchange
- | existence | symmetry | transitivity
- | add\\ S | drop\\ S | induction
- | axiom\\ ([1-5]) | premise | push | pop
- ''')
- LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*')
- COMMENT = re.compile(r'\[[^\n\]]+\]')
-
- def __init__(self, *args, **kwargs):
- Lexer.__init__(self, *args, **kwargs)
- self.cur = []
-
- def whitespace(self, start, text, required=False):
- """Tokenize whitespace."""
- end = start
- try:
- while text[end] in self.WHITESPACE:
- end += 1
- except IndexError:
- end = len(text)
- if required and end == start:
- raise AssertionError
- if end != start:
- self.cur.append((start, Text, text[start:end]))
- return end
-
- def variable(self, start, text):
- """Tokenize a variable."""
- if text[start] not in self.VARIABLES:
- raise AssertionError
- end = start+1
- while text[end] in self.PRIMES:
- end += 1
- self.cur.append((start, Name.Variable, text[start:end]))
- return end
-
- def term(self, start, text):
- """Tokenize a term."""
- if text[start] == 'S': # S...S(...) or S...0
- end = start+1
- while text[end] == 'S':
- end += 1
- self.cur.append((start, Number.Integer, text[start:end]))
- return self.term(end, text)
- if text[start] == '0': # the singleton 0
- self.cur.append((start, Number.Integer, text[start]))
- return start+1
- if text[start] in self.VARIABLES: # a''...
- return self.variable(start, text)
- if text[start] == '(': # (...+...)
- self.cur.append((start, Punctuation, text[start]))
- start = self.term(start+1, text)
- if text[start] not in self.OPERATORS:
- raise AssertionError
- self.cur.append((start, Operator, text[start]))
- start = self.term(start+1, text)
- if text[start] != ')':
- raise AssertionError
- self.cur.append((start, Punctuation, text[start]))
- return start+1
- raise AssertionError # no matches
-
- def formula(self, start, text):
- """Tokenize a formula."""
- if text[start] in self.NEGATORS: # ~<...>
- end = start+1
- while text[end] in self.NEGATORS:
- end += 1
- self.cur.append((start, Operator, text[start:end]))
- return self.formula(end, text)
- if text[start] in self.QUANTIFIERS: # Aa:<...>
- self.cur.append((start, Keyword.Declaration, text[start]))
- start = self.variable(start+1, text)
- if text[start] != ':':
- raise AssertionError
- self.cur.append((start, Punctuation, text[start]))
- return self.formula(start+1, text)
- if text[start] == '<': # <...&...>
- self.cur.append((start, Punctuation, text[start]))
- start = self.formula(start+1, text)
- if text[start] not in self.LOGIC:
- raise AssertionError
- self.cur.append((start, Operator, text[start]))
- start = self.formula(start+1, text)
- if text[start] != '>':
- raise AssertionError
- self.cur.append((start, Punctuation, text[start]))
- return start+1
- # ...=...
- start = self.term(start, text)
- if text[start] != '=':
- raise AssertionError
- self.cur.append((start, Operator, text[start]))
- start = self.term(start+1, text)
- return start
-
- def rule(self, start, text):
- """Tokenize a rule."""
- match = self.RULES.match(text, start)
- if match is None:
- raise AssertionError
- groups = sorted(match.regs[1:]) # exclude whole match
- for group in groups:
- if group[0] >= 0: # this group matched
- self.cur.append((start, Keyword, text[start:group[0]]))
- self.cur.append((group[0], Number.Integer,
- text[group[0]:group[1]]))
- if group[1] != match.end():
- self.cur.append((group[1], Keyword,
- text[group[1]:match.end()]))
- break
- else:
- self.cur.append((start, Keyword, text[start:match.end()]))
- return match.end()
-
- def lineno(self, start, text):
- """Tokenize a line referral."""
- end = start
- while text[end] not in self.NUMBERS:
- end += 1
- self.cur.append((start, Punctuation, text[start]))
- self.cur.append((start+1, Text, text[start+1:end]))
- start = end
- match = self.LINENOS.match(text, start)
- if match is None:
- raise AssertionError
- if text[match.end()] != ')':
- raise AssertionError
- self.cur.append((match.start(), Number.Integer, match.group(0)))
- self.cur.append((match.end(), Punctuation, text[match.end()]))
- return match.end() + 1
-
- def error_till_line_end(self, start, text):
- """Mark everything from ``start`` to the end of the line as Error."""
- end = start
- try:
- while text[end] != '\n': # there's whitespace in rules
- end += 1
- except IndexError:
- end = len(text)
- if end != start:
- self.cur.append((start, Error, text[start:end]))
- end = self.whitespace(end, text)
- return end
-
- def get_tokens_unprocessed(self, text):
- """Returns a list of TNT tokens."""
- self.cur = []
- start = end = self.whitespace(0, text)
- while start <= end < len(text):
- try:
- # try line number
- while text[end] in self.NUMBERS:
- end += 1
- if end != start: # actual number present
- self.cur.append((start, Number.Integer, text[start:end]))
- # whitespace is required after a line number
- orig = len(self.cur)
- try:
- start = end = self.whitespace(end, text, True)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(end, text)
- continue
- # at this point it could be a comment
- match = self.COMMENT.match(text, start)
- if match is not None:
- self.cur.append((start, Comment, text[start:match.end()]))
- start = end = match.end()
- # anything after the closing bracket is invalid
- start = end = self.error_till_line_end(start, text)
- # do not attempt to process the rest
- continue
- del match
- if text[start] in '[]': # fantasy push or pop
- self.cur.append((start, Keyword, text[start]))
- start += 1
- end += 1
- else:
- # one formula, possibly containing subformulae
- orig = len(self.cur)
- try:
- start = end = self.formula(start, text)
- except (AssertionError, RecursionError): # not well-formed
- del self.cur[orig:]
- while text[end] not in self.WHITESPACE:
- end += 1
- self.cur.append((start, Error, text[start:end]))
- start = end
- # skip whitespace after formula
- orig = len(self.cur)
- try:
- start = end = self.whitespace(end, text, True)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(start, text)
- continue
- # rule proving this formula a theorem
- orig = len(self.cur)
- try:
- start = end = self.rule(start, text)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(start, text)
- continue
- # skip whitespace after rule
- start = end = self.whitespace(end, text)
- # line marker
- if text[start] == '(':
- orig = len(self.cur)
- try:
- start = end = self.lineno(start, text)
- except AssertionError:
- del self.cur[orig:]
- start = end = self.error_till_line_end(start, text)
- continue
- start = end = self.whitespace(start, text)
- except IndexError:
- try:
- del self.cur[orig:]
- except NameError:
- pass # if orig was never defined, fine
- self.error_till_line_end(start, text)
- return self.cur
+"""
+ pygments.lexers.tnt
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Typographic Number Theory.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
+ Punctuation, Error
+
+__all__ = ['TNTLexer']
+
+
+class TNTLexer(Lexer):
+ """
+ Lexer for Typographic Number Theory, as described in the book
+ Gödel, Escher, Bach, by Douglas R. Hofstadter,
+ or as summarized here:
+ https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt
+
+ .. versionadded:: 2.7
+ """
+
+ name = 'Typographic Number Theory'
+ aliases = ['tnt']
+ filenames = ['*.tnt']
+
+ cur = []
+
+ LOGIC = set('⊃→]&∧^|∨Vv')
+ OPERATORS = set('+.⋅*')
+ VARIABLES = set('abcde')
+ PRIMES = set("'′")
+ NEGATORS = set('~!')
+ QUANTIFIERS = set('AE∀∃')
+ NUMBERS = set('0123456789')
+ WHITESPACE = set('\t \v\n')
+
+ RULES = re.compile('''(?xi)
+ joining | separation | double-tilde | fantasy\\ rule
+ | carry[- ]over(?:\\ of)?(?:\\ line)?\\ ([0-9]+) | detachment
+ | contrapositive | De\\ Morgan | switcheroo
+ | specification | generalization | interchange
+ | existence | symmetry | transitivity
+ | add\\ S | drop\\ S | induction
+ | axiom\\ ([1-5]) | premise | push | pop
+ ''')
+ LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? and )(?:[0-9]+))*')
+ COMMENT = re.compile(r'\[[^\n\]]+\]')
+
+ def __init__(self, *args, **kwargs):
+ Lexer.__init__(self, *args, **kwargs)
+ self.cur = []
+
+ def whitespace(self, start, text, required=False):
+ """Tokenize whitespace."""
+ end = start
+ try:
+ while text[end] in self.WHITESPACE:
+ end += 1
+ except IndexError:
+ end = len(text)
+ if required and end == start:
+ raise AssertionError
+ if end != start:
+ self.cur.append((start, Text, text[start:end]))
+ return end
+
+ def variable(self, start, text):
+ """Tokenize a variable."""
+ if text[start] not in self.VARIABLES:
+ raise AssertionError
+ end = start+1
+ while text[end] in self.PRIMES:
+ end += 1
+ self.cur.append((start, Name.Variable, text[start:end]))
+ return end
+
+ def term(self, start, text):
+ """Tokenize a term."""
+ if text[start] == 'S': # S...S(...) or S...0
+ end = start+1
+ while text[end] == 'S':
+ end += 1
+ self.cur.append((start, Number.Integer, text[start:end]))
+ return self.term(end, text)
+ if text[start] == '0': # the singleton 0
+ self.cur.append((start, Number.Integer, text[start]))
+ return start+1
+ if text[start] in self.VARIABLES: # a''...
+ return self.variable(start, text)
+ if text[start] == '(': # (...+...)
+ self.cur.append((start, Punctuation, text[start]))
+ start = self.term(start+1, text)
+ if text[start] not in self.OPERATORS:
+ raise AssertionError
+ self.cur.append((start, Operator, text[start]))
+ start = self.term(start+1, text)
+ if text[start] != ')':
+ raise AssertionError
+ self.cur.append((start, Punctuation, text[start]))
+ return start+1
+ raise AssertionError # no matches
+
+ def formula(self, start, text):
+ """Tokenize a formula."""
+ if text[start] in self.NEGATORS: # ~<...>
+ end = start+1
+ while text[end] in self.NEGATORS:
+ end += 1
+ self.cur.append((start, Operator, text[start:end]))
+ return self.formula(end, text)
+ if text[start] in self.QUANTIFIERS: # Aa:<...>
+ self.cur.append((start, Keyword.Declaration, text[start]))
+ start = self.variable(start+1, text)
+ if text[start] != ':':
+ raise AssertionError
+ self.cur.append((start, Punctuation, text[start]))
+ return self.formula(start+1, text)
+ if text[start] == '<': # <...&...>
+ self.cur.append((start, Punctuation, text[start]))
+ start = self.formula(start+1, text)
+ if text[start] not in self.LOGIC:
+ raise AssertionError
+ self.cur.append((start, Operator, text[start]))
+ start = self.formula(start+1, text)
+ if text[start] != '>':
+ raise AssertionError
+ self.cur.append((start, Punctuation, text[start]))
+ return start+1
+ # ...=...
+ start = self.term(start, text)
+ if text[start] != '=':
+ raise AssertionError
+ self.cur.append((start, Operator, text[start]))
+ start = self.term(start+1, text)
+ return start
+
+ def rule(self, start, text):
+ """Tokenize a rule."""
+ match = self.RULES.match(text, start)
+ if match is None:
+ raise AssertionError
+ groups = sorted(match.regs[1:]) # exclude whole match
+ for group in groups:
+ if group[0] >= 0: # this group matched
+ self.cur.append((start, Keyword, text[start:group[0]]))
+ self.cur.append((group[0], Number.Integer,
+ text[group[0]:group[1]]))
+ if group[1] != match.end():
+ self.cur.append((group[1], Keyword,
+ text[group[1]:match.end()]))
+ break
+ else:
+ self.cur.append((start, Keyword, text[start:match.end()]))
+ return match.end()
+
+ def lineno(self, start, text):
+ """Tokenize a line referral."""
+ end = start
+ while text[end] not in self.NUMBERS:
+ end += 1
+ self.cur.append((start, Punctuation, text[start]))
+ self.cur.append((start+1, Text, text[start+1:end]))
+ start = end
+ match = self.LINENOS.match(text, start)
+ if match is None:
+ raise AssertionError
+ if text[match.end()] != ')':
+ raise AssertionError
+ self.cur.append((match.start(), Number.Integer, match.group(0)))
+ self.cur.append((match.end(), Punctuation, text[match.end()]))
+ return match.end() + 1
+
+ def error_till_line_end(self, start, text):
+ """Mark everything from ``start`` to the end of the line as Error."""
+ end = start
+ try:
+ while text[end] != '\n': # there's whitespace in rules
+ end += 1
+ except IndexError:
+ end = len(text)
+ if end != start:
+ self.cur.append((start, Error, text[start:end]))
+ end = self.whitespace(end, text)
+ return end
+
+ def get_tokens_unprocessed(self, text):
+ """Returns a list of TNT tokens."""
+ self.cur = []
+ start = end = self.whitespace(0, text)
+ while start <= end < len(text):
+ try:
+ # try line number
+ while text[end] in self.NUMBERS:
+ end += 1
+ if end != start: # actual number present
+ self.cur.append((start, Number.Integer, text[start:end]))
+ # whitespace is required after a line number
+ orig = len(self.cur)
+ try:
+ start = end = self.whitespace(end, text, True)
+ except AssertionError:
+ del self.cur[orig:]
+ start = end = self.error_till_line_end(end, text)
+ continue
+ # at this point it could be a comment
+ match = self.COMMENT.match(text, start)
+ if match is not None:
+ self.cur.append((start, Comment, text[start:match.end()]))
+ start = end = match.end()
+ # anything after the closing bracket is invalid
+ start = end = self.error_till_line_end(start, text)
+ # do not attempt to process the rest
+ continue
+ del match
+ if text[start] in '[]': # fantasy push or pop
+ self.cur.append((start, Keyword, text[start]))
+ start += 1
+ end += 1
+ else:
+ # one formula, possibly containing subformulae
+ orig = len(self.cur)
+ try:
+ start = end = self.formula(start, text)
+ except (AssertionError, RecursionError): # not well-formed
+ del self.cur[orig:]
+ while text[end] not in self.WHITESPACE:
+ end += 1
+ self.cur.append((start, Error, text[start:end]))
+ start = end
+ # skip whitespace after formula
+ orig = len(self.cur)
+ try:
+ start = end = self.whitespace(end, text, True)
+ except AssertionError:
+ del self.cur[orig:]
+ start = end = self.error_till_line_end(start, text)
+ continue
+ # rule proving this formula a theorem
+ orig = len(self.cur)
+ try:
+ start = end = self.rule(start, text)
+ except AssertionError:
+ del self.cur[orig:]
+ start = end = self.error_till_line_end(start, text)
+ continue
+ # skip whitespace after rule
+ start = end = self.whitespace(end, text)
+ # line marker
+ if text[start] == '(':
+ orig = len(self.cur)
+ try:
+ start = end = self.lineno(start, text)
+ except AssertionError:
+ del self.cur[orig:]
+ start = end = self.error_till_line_end(start, text)
+ continue
+ start = end = self.whitespace(start, text)
+ except IndexError:
+ try:
+ del self.cur[orig:]
+ except NameError:
+ pass # if orig was never defined, fine
+ self.error_till_line_end(start, text)
+ return self.cur
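
Unlike the RegexLexer subclasses in the surrounding files, TNTLexer derives from Lexer directly and hand-parses terms, formulas and rules, using AssertionError internally to back out of ill-formed input and mark it as Error. From the outside it behaves like any other lexer. A minimal sketch, assuming a plain Pygments installation; both input lines are invented for illustration:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

# One well-formed TNT line (line number, formula, rule) followed by a
# bracketed comment line; neither comes from this diff.
proof = "1 <a=a&~b=b>\tpremise\n[ toy example, not a real derivation ]\n"

print(highlight(proof, get_lexer_by_name("tnt"), TerminalFormatter()))
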
diff --git a/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py b/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
index 67ecd243cb..64a566d253 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
@@ -4,7 +4,7 @@
Lexer for RiverBed's TrafficScript (RTS) language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,7 +21,7 @@ class RtsLexer(RegexLexer):
.. versionadded:: 2.1
"""
name = 'TrafficScript'
- aliases = ['trafficscript', 'rts']
+ aliases = ['trafficscript', 'rts']
filenames = ['*.rts']
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/typoscript.py b/contrib/python/Pygments/py3/pygments/lexers/typoscript.py
index b2e4299beb..d2a13cf437 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/typoscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/typoscript.py
@@ -13,7 +13,7 @@
`TypoScriptHtmlDataLexer`
Lexer that highlights markers, constants and registers within html tags.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/unicon.py b/contrib/python/Pygments/py3/pygments/lexers/unicon.py
index 4a76a0f821..708bc3490b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/unicon.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/unicon.py
@@ -4,7 +4,7 @@
Lexers for the Icon and Unicon languages, including ucode VM.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -128,15 +128,15 @@ class UniconLexer(RegexLexer):
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
- (r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|'
- r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator),
+ (r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|'
+ r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'\^', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"[\[\]]", Punctuation),
- (r"<>|=>|[()|:;,.'`{}%&?]", Punctuation),
+ (r"[\[\]]", Punctuation),
+ (r"<>|=>|[()|:;,.'`{}%&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
@@ -269,14 +269,14 @@ class IconLexer(RegexLexer):
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
- (r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|'
- r':=:|:=|<->|<-|\+:=|\|\||\|', Operator),
+ (r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|'
+ r':=:|:=|<->|<-|\+:=|\|\||\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"[\[\]]", Punctuation),
- (r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation),
+ (r"[\[\]]", Punctuation),
+ (r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
@@ -384,28 +384,28 @@ class UcodeLexer(RegexLexer):
(r'[\w-]+', Text),
],
}
-
- def analyse_text(text):
- """endsuspend and endrepeat are unique to this language, and
- \\self, /self doesn't seem to get used anywhere else either."""
- result = 0
-
- if 'endsuspend' in text:
- result += 0.1
-
- if 'endrepeat' in text:
- result += 0.1
-
- if ':=' in text:
- result += 0.01
-
- if 'procedure' in text and 'end' in text:
- result += 0.01
-
- # This seems quite unique to unicon -- doesn't appear in any other
- # example source we have (A quick search reveals that \SELF appears in
- # Perl/Raku code)
- if r'\self' in text and r'/self' in text:
- result += 0.5
-
- return result
+
+ def analyse_text(text):
+ """endsuspend and endrepeat are unique to this language, and
+ \\self, /self doesn't seem to get used anywhere else either."""
+ result = 0
+
+ if 'endsuspend' in text:
+ result += 0.1
+
+ if 'endrepeat' in text:
+ result += 0.1
+
+ if ':=' in text:
+ result += 0.01
+
+ if 'procedure' in text and 'end' in text:
+ result += 0.01
+
+ # This seems quite unique to unicon -- doesn't appear in any other
+ # example source we have (A quick search reveals that \SELF appears in
+ # Perl/Raku code)
+ if r'\self' in text and r'/self' in text:
+ result += 0.5
+
+ return result
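
The restored UcodeLexer.analyse_text accumulates a score from several weak signals ('endsuspend', 'endrepeat', ':=', 'procedure' plus 'end') and one strong signal (\self together with /self). Pygments wraps analyse_text into a static method that clamps the result to the 0.0-1.0 range consumed by guess_lexer, so the heuristic can be probed directly. A quick sketch with a contrived sample that triggers every branch, assuming a plain Pygments installation:

from pygments.lexers.unicon import UcodeLexer

# Contrived text containing every signal the heuristic looks for; it is not
# meaningful ucode, only a probe for the scoring.
sample = (
    "procedure main()\n"
    "   x := \\self\n"
    "   y := /self\n"
    "end\n"
    "endsuspend\n"
    "endrepeat\n"
)

# Sums 0.1 + 0.1 + 0.01 + 0.01 + 0.5 from the weights in the hunk above.
print(UcodeLexer.analyse_text(sample))
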
diff --git a/contrib/python/Pygments/py3/pygments/lexers/urbi.py b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
index d9c1c9f82c..be1298375b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/urbi.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
@@ -4,7 +4,7 @@
Lexers for UrbiScript language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -116,11 +116,11 @@ class UrbiscriptLexer(ExtendedRegexLexer):
],
'string.double': [
(r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
- (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
+ (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
],
'string.single': [
(r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
- (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
+ (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
],
# from http://pygments.org/docs/lexerdevelopment/#changing-states
'comment': [
@@ -130,16 +130,16 @@ class UrbiscriptLexer(ExtendedRegexLexer):
(r'[*/]', Comment.Multiline),
]
}
-
- def analyse_text(text):
- """This is fairly similar to C and others, but freezeif and
- waituntil are unique keywords."""
- result = 0
-
- if 'freezeif' in text:
- result += 0.05
-
- if 'waituntil' in text:
- result += 0.05
-
- return result
+
+ def analyse_text(text):
+ """This is fairly similar to C and others, but freezeif and
+ waituntil are unique keywords."""
+ result = 0
+
+ if 'freezeif' in text:
+ result += 0.05
+
+ if 'waituntil' in text:
+ result += 0.05
+
+ return result
diff --git a/contrib/python/Pygments/py3/pygments/lexers/usd.py b/contrib/python/Pygments/py3/pygments/lexers/usd.py
index 2b4ace862c..c5dbf14812 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/usd.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/usd.py
@@ -1,89 +1,89 @@
-"""
- pygments.lexers.usd
- ~~~~~~~~~~~~~~~~~~~
-
- The module that parses Pixar's Universal Scene Description file format.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.lexer import words as words_
-from pygments.lexers._usd_builtins import COMMON_ATTRIBUTES, KEYWORDS, \
- OPERATORS, SPECIAL_NAMES, TYPES
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
- Punctuation, String, Text, Whitespace
-
-__all__ = ["UsdLexer"]
-
-
-def _keywords(words, type_):
- return [(words_(words, prefix=r"\b", suffix=r"\b"), type_)]
-
-
-_TYPE = r"(\w+(?:\[\])?)"
-_BASE_ATTRIBUTE = r"(\w+(?:\:\w+)*)(?:(\.)(timeSamples))?"
-_WHITESPACE = r"([ \t]+)"
-
-
-class UsdLexer(RegexLexer):
- """
- A lexer that parses Pixar's Universal Scene Description file format.
-
- .. versionadded:: 2.6
- """
-
- name = "USD"
- aliases = ["usd", "usda"]
- filenames = ["*.usd", "*.usda"]
-
- tokens = {
- "root": [
- (r"(custom){_WHITESPACE}(uniform)(\s+){}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Token, Whitespace, Keyword.Token, Whitespace,
- Keyword.Type, Whitespace, Name.Attribute, Text,
- Name.Keyword.Tokens, Whitespace, Operator)),
- (r"(custom){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
- Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
- Operator)),
- (r"(uniform){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
- Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
- Operator)),
- (r"{}{_WHITESPACE}{}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
- bygroups(Keyword.Type, Whitespace, Name.Attribute, Text,
- Name.Keyword.Tokens, Whitespace, Operator)),
- ] +
- _keywords(KEYWORDS, Keyword.Tokens) +
- _keywords(SPECIAL_NAMES, Name.Builtins) +
- _keywords(COMMON_ATTRIBUTES, Name.Attribute) +
- [(r"\b\w+:[\w:]+\b", Name.Attribute)] +
- _keywords(OPERATORS, Operator) + # more attributes
- [(type_ + r"\[\]", Keyword.Type) for type_ in TYPES] +
- _keywords(TYPES, Keyword.Type) +
- [
- (r"[(){}\[\]]", Punctuation),
- ("#.*?$", Comment.Single),
- (",", Punctuation),
- (";", Punctuation), # ";"s are allowed to combine separate metadata lines
- ("=", Operator),
- (r"[-]*([0-9]*[.])?[0-9]+(?:e[+-]*\d+)?", Number),
- (r"'''(?:.|\n)*?'''", String),
- (r'"""(?:.|\n)*?"""', String),
- (r"'.*?'", String),
- (r'".*?"', String),
- (r"<(\.\./)*([\w/]+|[\w/]+\.\w+[\w:]*)>", Name.Namespace),
- (r"@.*?@", String.Interpol),
- (r'\(.*"[.\\n]*".*\)', String.Doc),
- (r"\A#usda .+$", Comment.Hashbang),
- (r"\s+", Whitespace),
- (r"\w+", Text),
- (r"[_:.]+", Punctuation),
- ],
- }
+"""
+ pygments.lexers.usd
+ ~~~~~~~~~~~~~~~~~~~
+
+ The module that parses Pixar's Universal Scene Description file format.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.lexer import words as words_
+from pygments.lexers._usd_builtins import COMMON_ATTRIBUTES, KEYWORDS, \
+ OPERATORS, SPECIAL_NAMES, TYPES
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text, Whitespace
+
+__all__ = ["UsdLexer"]
+
+
+def _keywords(words, type_):
+ return [(words_(words, prefix=r"\b", suffix=r"\b"), type_)]
+
+
+_TYPE = r"(\w+(?:\[\])?)"
+_BASE_ATTRIBUTE = r"(\w+(?:\:\w+)*)(?:(\.)(timeSamples))?"
+_WHITESPACE = r"([ \t]+)"
+
+
+class UsdLexer(RegexLexer):
+ """
+ A lexer that parses Pixar's Universal Scene Description file format.
+
+ .. versionadded:: 2.6
+ """
+
+ name = "USD"
+ aliases = ["usd", "usda"]
+ filenames = ["*.usd", "*.usda"]
+
+ tokens = {
+ "root": [
+ (r"(custom){_WHITESPACE}(uniform)(\s+){}(\s+){}(\s*)(=)".format(
+ _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ bygroups(Keyword.Token, Whitespace, Keyword.Token, Whitespace,
+ Keyword.Type, Whitespace, Name.Attribute, Text,
+ Name.Keyword.Tokens, Whitespace, Operator)),
+ (r"(custom){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
+ _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
+ Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
+ Operator)),
+ (r"(uniform){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
+ _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
+ Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
+ Operator)),
+ (r"{}{_WHITESPACE}{}(\s*)(=)".format(
+ _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ bygroups(Keyword.Type, Whitespace, Name.Attribute, Text,
+ Name.Keyword.Tokens, Whitespace, Operator)),
+ ] +
+ _keywords(KEYWORDS, Keyword.Tokens) +
+ _keywords(SPECIAL_NAMES, Name.Builtins) +
+ _keywords(COMMON_ATTRIBUTES, Name.Attribute) +
+ [(r"\b\w+:[\w:]+\b", Name.Attribute)] +
+ _keywords(OPERATORS, Operator) + # more attributes
+ [(type_ + r"\[\]", Keyword.Type) for type_ in TYPES] +
+ _keywords(TYPES, Keyword.Type) +
+ [
+ (r"[(){}\[\]]", Punctuation),
+ ("#.*?$", Comment.Single),
+ (",", Punctuation),
+ (";", Punctuation), # ";"s are allowed to combine separate metadata lines
+ ("=", Operator),
+ (r"[-]*([0-9]*[.])?[0-9]+(?:e[+-]*\d+)?", Number),
+ (r"'''(?:.|\n)*?'''", String),
+ (r'"""(?:.|\n)*?"""', String),
+ (r"'.*?'", String),
+ (r'".*?"', String),
+ (r"<(\.\./)*([\w/]+|[\w/]+\.\w+[\w:]*)>", Name.Namespace),
+ (r"@.*?@", String.Interpol),
+ (r'\(.*"[.\\n]*".*\)', String.Doc),
+ (r"\A#usda .+$", Comment.Hashbang),
+ (r"\s+", Whitespace),
+ (r"\w+", Text),
+ (r"[_:.]+", Punctuation),
+ ],
+ }
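
UsdLexer is registered under the 'usd' and 'usda' aliases and the matching filename patterns, so a .usda snippet goes through the standard pipeline. A minimal sketch, assuming a plain Pygments installation; the scene description is invented for illustration:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

# Invented USDA snippet; the attribute line is matched by the
# custom/<type>/<attribute> '=' rule at the top of 'root'.
usda = (
    '#usda 1.0\n'
    'def Xform "hello"\n'
    '{\n'
    '    custom double3 xformOp:translate = (4, 5, 6)\n'
    '}\n'
)

print(highlight(usda, get_lexer_by_name("usda"), HtmlFormatter()))
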
diff --git a/contrib/python/Pygments/py3/pygments/lexers/varnish.py b/contrib/python/Pygments/py3/pygments/lexers/varnish.py
index 618049be5b..9bcf2bf63c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/varnish.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/varnish.py
@@ -4,7 +4,7 @@
Lexers for Varnish configuration
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -60,7 +60,7 @@ class VCLLexer(RegexLexer):
bygroups(Name.Attribute, Operator, Name.Variable.Global, Punctuation)),
(r'(\.probe)(\s*=\s*)(\{)',
bygroups(Name.Attribute, Operator, Punctuation), 'probe'),
- (r'(\.\w+\b)(\s*=\s*)([^;\s]*)(\s*;)',
+ (r'(\.\w+\b)(\s*=\s*)([^;\s]*)(\s*;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/verification.py b/contrib/python/Pygments/py3/pygments/lexers/verification.py
index 2d473ae812..ec68f5a109 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/verification.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/verification.py
@@ -4,13 +4,13 @@
Lexer for Intermediate Verification Languages (IVLs).
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, words
from pygments.token import Comment, Operator, Keyword, Name, Number, \
- Punctuation, Text, Generic
+ Punctuation, Text, Generic
__all__ = ['BoogieLexer', 'SilverLexer']
@@ -28,9 +28,9 @@ class BoogieLexer(RegexLexer):
tokens = {
'root': [
# Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
(r'//[/!](.*?)\n', Comment.Doc),
(r'//(.*?)\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
@@ -45,7 +45,7 @@ class BoogieLexer(RegexLexer):
(words(('bool', 'int', 'ref'), suffix=r'\b'), Keyword.Type),
include('numbers'),
(r"(>=|<=|:=|!=|==>|&&|\|\||[+/\-=>*<\[\]])", Operator),
- (r'\{.*?\}', Generic.Emph), #triggers
+ (r'\{.*?\}', Generic.Emph), #triggers
(r"([{}():;,.])", Punctuation),
# Identifier
(r'[a-zA-Z_]\w*', Name),
@@ -75,9 +75,9 @@ class SilverLexer(RegexLexer):
tokens = {
'root': [
# Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
(r'//[/!](.*?)\n', Comment.Doc),
(r'//(.*?)\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
@@ -85,18 +85,18 @@ class SilverLexer(RegexLexer):
(words((
'result', 'true', 'false', 'null', 'method', 'function',
'predicate', 'program', 'domain', 'axiom', 'var', 'returns',
- 'field', 'define', 'fold', 'unfold', 'inhale', 'exhale', 'new', 'assert',
+ 'field', 'define', 'fold', 'unfold', 'inhale', 'exhale', 'new', 'assert',
'assume', 'goto', 'while', 'if', 'elseif', 'else', 'fresh',
'constraining', 'Seq', 'Set', 'Multiset', 'union', 'intersection',
'setminus', 'subset', 'unfolding', 'in', 'old', 'forall', 'exists',
'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique',
'apply', 'package', 'folding', 'label', 'forperm'),
suffix=r'\b'), Keyword),
- (words(('requires', 'ensures', 'invariant'), suffix=r'\b'), Name.Decorator),
- (words(('Int', 'Perm', 'Bool', 'Ref', 'Rational'), suffix=r'\b'), Keyword.Type),
+ (words(('requires', 'ensures', 'invariant'), suffix=r'\b'), Name.Decorator),
+ (words(('Int', 'Perm', 'Bool', 'Ref', 'Rational'), suffix=r'\b'), Keyword.Type),
include('numbers'),
(r'[!%&*+=|?:<>/\-\[\]]', Operator),
- (r'\{.*?\}', Generic.Emph), #triggers
+ (r'\{.*?\}', Generic.Emph), #triggers
(r'([{}():;,.])', Punctuation),
# Identifier
(r'[\w$]\w*', Name),
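
The Boogie and Silver hunks above only re-add lines in the whitespace, keyword, type and trigger rules, but the lexer classes themselves are importable from this module once the file is restored. A minimal sketch, assuming a plain Pygments installation; the Boogie procedure is invented for illustration:

from pygments import highlight
from pygments.lexers.verification import BoogieLexer
from pygments.formatters import TerminalFormatter

# Invented Boogie procedure used only to exercise the lexer.
boogie_src = (
    "procedure Inc(x: int) returns (y: int)\n"
    "  ensures y == x + 1;\n"
    "{\n"
    "  y := x + 1;\n"
    "}\n"
)

print(highlight(boogie_src, BoogieLexer(), TerminalFormatter()))
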
diff --git a/contrib/python/Pygments/py3/pygments/lexers/web.py b/contrib/python/Pygments/py3/pygments/lexers/web.py
index a186048888..54d2a8865b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/web.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/web.py
@@ -4,7 +4,7 @@
Just export previously exported lexers.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webassembly.py b/contrib/python/Pygments/py3/pygments/lexers/webassembly.py
index d8d84b5c47..1cfd978296 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webassembly.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webassembly.py
@@ -1,119 +1,119 @@
-"""
- pygments.lexers.webassembly
+"""
+ pygments.lexers.webassembly
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the WebAssembly text format.
-
- The grammar can be found at https://github.com/WebAssembly/spec/blob/master/interpreter/README.md
- and https://webassembly.github.io/spec/core/text/.
-
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, default
+
+ Lexers for the WebAssembly text format.
+
+ The grammar can be found at https://github.com/WebAssembly/spec/blob/master/interpreter/README.md
+ and https://webassembly.github.io/spec/core/text/.
+
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, default
from pygments.token import Text, Comment, Operator, Keyword, String, Number, Punctuation, Name
-
-__all__ = ['WatLexer']
-
-keywords = (
- 'module', 'import', 'func', 'funcref', 'start', 'param', 'local', 'type',
- 'result', 'export', 'memory', 'global', 'mut', 'data', 'table', 'elem',
- 'if', 'then', 'else', 'end', 'block', 'loop'
-)
-
-builtins = (
- 'unreachable', 'nop', 'block', 'loop', 'if', 'else', 'end', 'br', 'br_if',
- 'br_table', 'return', 'call', 'call_indirect', 'drop', 'select',
- 'local.get', 'local.set', 'local.tee', 'global.get', 'global.set',
- 'i32.load', 'i64.load', 'f32.load', 'f64.load', 'i32.load8_s',
- 'i32.load8_u', 'i32.load16_s', 'i32.load16_u', 'i64.load8_s',
- 'i64.load8_u', 'i64.load16_s', 'i64.load16_u', 'i64.load32_s',
- 'i64.load32_u', 'i32.store', 'i64.store', 'f32.store', 'f64.store',
- 'i32.store8', 'i32.store16', 'i64.store8', 'i64.store16', 'i64.store32',
- 'memory.size', 'memory.grow', 'i32.const', 'i64.const', 'f32.const',
- 'f64.const', 'i32.eqz', 'i32.eq', 'i32.ne', 'i32.lt_s', 'i32.lt_u',
- 'i32.gt_s', 'i32.gt_u', 'i32.le_s', 'i32.le_u', 'i32.ge_s', 'i32.ge_u',
- 'i64.eqz', 'i64.eq', 'i64.ne', 'i64.lt_s', 'i64.lt_u', 'i64.gt_s',
- 'i64.gt_u', 'i64.le_s', 'i64.le_u', 'i64.ge_s', 'i64.ge_u', 'f32.eq',
- 'f32.ne', 'f32.lt', 'f32.gt', 'f32.le', 'f32.ge', 'f64.eq', 'f64.ne',
- 'f64.lt', 'f64.gt', 'f64.le', 'f64.ge', 'i32.clz', 'i32.ctz', 'i32.popcnt',
- 'i32.add', 'i32.sub', 'i32.mul', 'i32.div_s', 'i32.div_u', 'i32.rem_s',
- 'i32.rem_u', 'i32.and', 'i32.or', 'i32.xor', 'i32.shl', 'i32.shr_s',
- 'i32.shr_u', 'i32.rotl', 'i32.rotr', 'i64.clz', 'i64.ctz', 'i64.popcnt',
- 'i64.add', 'i64.sub', 'i64.mul', 'i64.div_s', 'i64.div_u', 'i64.rem_s',
- 'i64.rem_u', 'i64.and', 'i64.or', 'i64.xor', 'i64.shl', 'i64.shr_s',
- 'i64.shr_u', 'i64.rotl', 'i64.rotr', 'f32.abs', 'f32.neg', 'f32.ceil',
- 'f32.floor', 'f32.trunc', 'f32.nearest', 'f32.sqrt', 'f32.add', 'f32.sub',
- 'f32.mul', 'f32.div', 'f32.min', 'f32.max', 'f32.copysign', 'f64.abs',
- 'f64.neg', 'f64.ceil', 'f64.floor', 'f64.trunc', 'f64.nearest', 'f64.sqrt',
- 'f64.add', 'f64.sub', 'f64.mul', 'f64.div', 'f64.min', 'f64.max',
- 'f64.copysign', 'i32.wrap_i64', 'i32.trunc_f32_s', 'i32.trunc_f32_u',
- 'i32.trunc_f64_s', 'i32.trunc_f64_u', 'i64.extend_i32_s',
- 'i64.extend_i32_u', 'i64.trunc_f32_s', 'i64.trunc_f32_u',
- 'i64.trunc_f64_s', 'i64.trunc_f64_u', 'f32.convert_i32_s',
- 'f32.convert_i32_u', 'f32.convert_i64_s', 'f32.convert_i64_u',
- 'f32.demote_f64', 'f64.convert_i32_s', 'f64.convert_i32_u',
- 'f64.convert_i64_s', 'f64.convert_i64_u', 'f64.promote_f32',
- 'i32.reinterpret_f32', 'i64.reinterpret_f64', 'f32.reinterpret_i32',
- 'f64.reinterpret_i64',
-)
-
-
-class WatLexer(RegexLexer):
- """Lexer for the `WebAssembly text format <https://webassembly.org/>`_.
-
- .. versionadded:: 2.9
- """
-
- name = 'WebAssembly'
- aliases = ['wast', 'wat']
- filenames = ['*.wat', '*.wast']
-
- tokens = {
- 'root': [
- (words(keywords, suffix=r'(?=[^a-z_\.])'), Keyword),
- (words(builtins), Name.Builtin, 'arguments'),
- (words(['i32', 'i64', 'f32', 'f64']), Keyword.Type),
- (r'\$[A-Za-z0-9!#$%&\'*+./:<=>?@\\^_`|~-]+', Name.Variable), # yes, all of the are valid in identifiers
- (r';;.*?$', Comment.Single),
- (r'\(;', Comment.Multiline, 'nesting_comment'),
- (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*(.([\dA-Fa-f](_?[\dA-Fa-f])*)?)?([pP][+-]?[\dA-Fa-f](_?[\dA-Fa-f])*)?', Number.Float),
- (r'[+-]?\d.\d(_?\d)*[eE][+-]?\d(_?\d)*', Number.Float),
- (r'[+-]?\d.\d(_?\d)*', Number.Float),
- (r'[+-]?\d.[eE][+-]?\d(_?\d)*', Number.Float),
- (r'[+-]?(inf|nan:0x[\dA-Fa-f](_?[\dA-Fa-f])*|nan)', Number.Float),
- (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*', Number.Hex),
- (r'[+-]?\d(_?\d)*', Number.Integer),
- (r'[\(\)]', Punctuation),
- (r'"', String.Double, 'string'),
- (r'\s+', Text),
- ],
- 'nesting_comment': [
- (r'\(;', Comment.Multiline, '#push'),
- (r';\)', Comment.Multiline, '#pop'),
- (r'[^;(]+', Comment.Multiline),
- (r'[;(]', Comment.Multiline),
- ],
- 'string': [
- (r'\\[\dA-Fa-f][\dA-Fa-f]', String.Escape), # must have exactly two hex digits
- (r'\\t', String.Escape),
- (r'\\n', String.Escape),
- (r'\\r', String.Escape),
- (r'\\"', String.Escape),
- (r"\\'", String.Escape),
- (r'\\u\{[\dA-Fa-f](_?[\dA-Fa-f])*\}', String.Escape),
- (r'\\\\', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- ],
- 'arguments': [
- (r'\s+', Text),
- (r'(offset)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
- (r'(offset)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
- (r'(align)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
- (r'(align)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
- default('#pop'),
- ]
- }
+
+__all__ = ['WatLexer']
+
+keywords = (
+ 'module', 'import', 'func', 'funcref', 'start', 'param', 'local', 'type',
+ 'result', 'export', 'memory', 'global', 'mut', 'data', 'table', 'elem',
+ 'if', 'then', 'else', 'end', 'block', 'loop'
+)
+
+builtins = (
+ 'unreachable', 'nop', 'block', 'loop', 'if', 'else', 'end', 'br', 'br_if',
+ 'br_table', 'return', 'call', 'call_indirect', 'drop', 'select',
+ 'local.get', 'local.set', 'local.tee', 'global.get', 'global.set',
+ 'i32.load', 'i64.load', 'f32.load', 'f64.load', 'i32.load8_s',
+ 'i32.load8_u', 'i32.load16_s', 'i32.load16_u', 'i64.load8_s',
+ 'i64.load8_u', 'i64.load16_s', 'i64.load16_u', 'i64.load32_s',
+ 'i64.load32_u', 'i32.store', 'i64.store', 'f32.store', 'f64.store',
+ 'i32.store8', 'i32.store16', 'i64.store8', 'i64.store16', 'i64.store32',
+ 'memory.size', 'memory.grow', 'i32.const', 'i64.const', 'f32.const',
+ 'f64.const', 'i32.eqz', 'i32.eq', 'i32.ne', 'i32.lt_s', 'i32.lt_u',
+ 'i32.gt_s', 'i32.gt_u', 'i32.le_s', 'i32.le_u', 'i32.ge_s', 'i32.ge_u',
+ 'i64.eqz', 'i64.eq', 'i64.ne', 'i64.lt_s', 'i64.lt_u', 'i64.gt_s',
+ 'i64.gt_u', 'i64.le_s', 'i64.le_u', 'i64.ge_s', 'i64.ge_u', 'f32.eq',
+ 'f32.ne', 'f32.lt', 'f32.gt', 'f32.le', 'f32.ge', 'f64.eq', 'f64.ne',
+ 'f64.lt', 'f64.gt', 'f64.le', 'f64.ge', 'i32.clz', 'i32.ctz', 'i32.popcnt',
+ 'i32.add', 'i32.sub', 'i32.mul', 'i32.div_s', 'i32.div_u', 'i32.rem_s',
+ 'i32.rem_u', 'i32.and', 'i32.or', 'i32.xor', 'i32.shl', 'i32.shr_s',
+ 'i32.shr_u', 'i32.rotl', 'i32.rotr', 'i64.clz', 'i64.ctz', 'i64.popcnt',
+ 'i64.add', 'i64.sub', 'i64.mul', 'i64.div_s', 'i64.div_u', 'i64.rem_s',
+ 'i64.rem_u', 'i64.and', 'i64.or', 'i64.xor', 'i64.shl', 'i64.shr_s',
+ 'i64.shr_u', 'i64.rotl', 'i64.rotr', 'f32.abs', 'f32.neg', 'f32.ceil',
+ 'f32.floor', 'f32.trunc', 'f32.nearest', 'f32.sqrt', 'f32.add', 'f32.sub',
+ 'f32.mul', 'f32.div', 'f32.min', 'f32.max', 'f32.copysign', 'f64.abs',
+ 'f64.neg', 'f64.ceil', 'f64.floor', 'f64.trunc', 'f64.nearest', 'f64.sqrt',
+ 'f64.add', 'f64.sub', 'f64.mul', 'f64.div', 'f64.min', 'f64.max',
+ 'f64.copysign', 'i32.wrap_i64', 'i32.trunc_f32_s', 'i32.trunc_f32_u',
+ 'i32.trunc_f64_s', 'i32.trunc_f64_u', 'i64.extend_i32_s',
+ 'i64.extend_i32_u', 'i64.trunc_f32_s', 'i64.trunc_f32_u',
+ 'i64.trunc_f64_s', 'i64.trunc_f64_u', 'f32.convert_i32_s',
+ 'f32.convert_i32_u', 'f32.convert_i64_s', 'f32.convert_i64_u',
+ 'f32.demote_f64', 'f64.convert_i32_s', 'f64.convert_i32_u',
+ 'f64.convert_i64_s', 'f64.convert_i64_u', 'f64.promote_f32',
+ 'i32.reinterpret_f32', 'i64.reinterpret_f64', 'f32.reinterpret_i32',
+ 'f64.reinterpret_i64',
+)
+
+
+class WatLexer(RegexLexer):
+ """Lexer for the `WebAssembly text format <https://webassembly.org/>`_.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'WebAssembly'
+ aliases = ['wast', 'wat']
+ filenames = ['*.wat', '*.wast']
+
+ tokens = {
+ 'root': [
+ (words(keywords, suffix=r'(?=[^a-z_\.])'), Keyword),
+ (words(builtins), Name.Builtin, 'arguments'),
+ (words(['i32', 'i64', 'f32', 'f64']), Keyword.Type),
+            (r'\$[A-Za-z0-9!#$%&\'*+./:<=>?@\\^_`|~-]+', Name.Variable), # yes, all of these are valid in identifiers
+ (r';;.*?$', Comment.Single),
+ (r'\(;', Comment.Multiline, 'nesting_comment'),
+ (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*(.([\dA-Fa-f](_?[\dA-Fa-f])*)?)?([pP][+-]?[\dA-Fa-f](_?[\dA-Fa-f])*)?', Number.Float),
+ (r'[+-]?\d.\d(_?\d)*[eE][+-]?\d(_?\d)*', Number.Float),
+ (r'[+-]?\d.\d(_?\d)*', Number.Float),
+ (r'[+-]?\d.[eE][+-]?\d(_?\d)*', Number.Float),
+ (r'[+-]?(inf|nan:0x[\dA-Fa-f](_?[\dA-Fa-f])*|nan)', Number.Float),
+ (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*', Number.Hex),
+ (r'[+-]?\d(_?\d)*', Number.Integer),
+ (r'[\(\)]', Punctuation),
+ (r'"', String.Double, 'string'),
+ (r'\s+', Text),
+ ],
+ 'nesting_comment': [
+ (r'\(;', Comment.Multiline, '#push'),
+ (r';\)', Comment.Multiline, '#pop'),
+ (r'[^;(]+', Comment.Multiline),
+ (r'[;(]', Comment.Multiline),
+ ],
+ 'string': [
+ (r'\\[\dA-Fa-f][\dA-Fa-f]', String.Escape), # must have exactly two hex digits
+ (r'\\t', String.Escape),
+ (r'\\n', String.Escape),
+ (r'\\r', String.Escape),
+ (r'\\"', String.Escape),
+ (r"\\'", String.Escape),
+ (r'\\u\{[\dA-Fa-f](_?[\dA-Fa-f])*\}', String.Escape),
+ (r'\\\\', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ ],
+ 'arguments': [
+ (r'\s+', Text),
+ (r'(offset)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
+ (r'(offset)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
+ (r'(align)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
+ (r'(align)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
+ default('#pop'),
+ ]
+ }
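
As a quick check of the WatLexer defined above, here is a minimal usage sketch. It assumes a stock Pygments installation; the sample module text and the choice of TerminalFormatter are illustrative only, not part of the patch.

# Tokenize and colorize a tiny WebAssembly text module with WatLexer.
from pygments import highlight
from pygments.lexers import WatLexer
from pygments.formatters import TerminalFormatter

wat_source = '''(module
  (func $add (param $a i32) (param $b i32) (result i32)
    local.get $a
    local.get $b
    i32.add)
  (export "add" (func $add)))
'''

# highlight() runs the lexer over the source and renders ANSI-colored output.
print(highlight(wat_source, WatLexer(), TerminalFormatter()))
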
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webidl.py b/contrib/python/Pygments/py3/pygments/lexers/webidl.py
index d805484c74..6b5627695c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webidl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webidl.py
@@ -1,298 +1,298 @@
-"""
- pygments.lexers.webidl
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Web IDL, including some extensions.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, default, include, words
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
- String, Text
-
-__all__ = ['WebIDLLexer']
-
-_builtin_types = (
- # primitive types
- 'byte', 'octet', 'boolean',
- r'(?:unsigned\s+)?(?:short|long(?:\s+long)?)',
- r'(?:unrestricted\s+)?(?:float|double)',
- # string types
- 'DOMString', 'ByteString', 'USVString',
- # exception types
- 'Error', 'DOMException',
- # typed array types
- 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Uint8ClampedArray',
- 'Float32Array', 'Float64Array',
- # buffer source types
- 'ArrayBuffer', 'DataView', 'Int8Array', 'Int16Array', 'Int32Array',
- # other
- 'any', 'void', 'object', 'RegExp',
-)
-_identifier = r'_?[A-Za-z][a-zA-Z0-9_-]*'
-_keyword_suffix = r'(?![\w-])'
-_string = r'"[^"]*"'
-
-
-class WebIDLLexer(RegexLexer):
- """
- For Web IDL.
-
- .. versionadded:: 2.6
- """
-
- name = 'Web IDL'
- aliases = ['webidl']
- filenames = ['*.webidl']
-
- tokens = {
- 'common': [
- (r'\s+', Text),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'//.*', Comment.Single),
- (r'^#.*', Comment.Preproc),
- ],
- 'root': [
- include('common'),
- (r'\[', Punctuation, 'extended_attributes'),
- (r'partial' + _keyword_suffix, Keyword),
- (r'typedef' + _keyword_suffix, Keyword, ('typedef', 'type')),
- (r'interface' + _keyword_suffix, Keyword, 'interface_rest'),
- (r'enum' + _keyword_suffix, Keyword, 'enum_rest'),
- (r'callback' + _keyword_suffix, Keyword, 'callback_rest'),
- (r'dictionary' + _keyword_suffix, Keyword, 'dictionary_rest'),
- (r'namespace' + _keyword_suffix, Keyword, 'namespace_rest'),
- (_identifier, Name.Class, 'implements_rest'),
- ],
- 'extended_attributes': [
- include('common'),
- (r',', Punctuation),
- (_identifier, Name.Decorator),
- (r'=', Punctuation, 'extended_attribute_rest'),
- (r'\(', Punctuation, 'argument_list'),
- (r'\]', Punctuation, '#pop'),
- ],
- 'extended_attribute_rest': [
- include('common'),
- (_identifier, Name, 'extended_attribute_named_rest'),
- (_string, String),
- (r'\(', Punctuation, 'identifier_list'),
- default('#pop'),
- ],
- 'extended_attribute_named_rest': [
- include('common'),
- (r'\(', Punctuation, 'argument_list'),
- default('#pop'),
- ],
- 'argument_list': [
- include('common'),
- (r'\)', Punctuation, '#pop'),
- default('argument'),
- ],
- 'argument': [
- include('common'),
- (r'optional' + _keyword_suffix, Keyword),
- (r'\[', Punctuation, 'extended_attributes'),
- (r',', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- default(('argument_rest', 'type'))
- ],
- 'argument_rest': [
- include('common'),
- (_identifier, Name.Variable),
- (r'\.\.\.', Punctuation),
- (r'=', Punctuation, 'default_value'),
- default('#pop'),
- ],
- 'identifier_list': [
- include('common'),
- (_identifier, Name.Class),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- ],
- 'type': [
- include('common'),
- (r'(?:' + r'|'.join(_builtin_types) + r')' + _keyword_suffix,
- Keyword.Type, 'type_null'),
- (words(('sequence', 'Promise', 'FrozenArray'),
- suffix=_keyword_suffix), Keyword.Type, 'type_identifier'),
- (_identifier, Name.Class, 'type_identifier'),
- (r'\(', Punctuation, 'union_type'),
- ],
- 'union_type': [
- include('common'),
- (r'or' + _keyword_suffix, Keyword),
- (r'\)', Punctuation, ('#pop', 'type_null')),
- default('type'),
- ],
- 'type_identifier': [
- (r'<', Punctuation, 'type_list'),
- default(('#pop', 'type_null'))
- ],
- 'type_null': [
- (r'\?', Punctuation),
- default('#pop:2'),
- ],
- 'default_value': [
- include('common'),
- include('const_value'),
- (_string, String, '#pop'),
- (r'\[\s*\]', Punctuation, '#pop'),
- ],
- 'const_value': [
- include('common'),
- (words(('true', 'false', '-Infinity', 'Infinity', 'NaN', 'null'),
- suffix=_keyword_suffix), Keyword.Constant, '#pop'),
- (r'-?(?:(?:[0-9]+\.[0-9]*|[0-9]*\.[0-9]+)(?:[Ee][+-]?[0-9]+)?' +
- r'|[0-9]+[Ee][+-]?[0-9]+)', Number.Float, '#pop'),
- (r'-?[1-9][0-9]*', Number.Integer, '#pop'),
- (r'-?0[Xx][0-9A-Fa-f]+', Number.Hex, '#pop'),
- (r'-?0[0-7]*', Number.Oct, '#pop'),
- ],
- 'typedef': [
- include('common'),
- (_identifier, Name.Class),
- (r';', Punctuation, '#pop'),
- ],
- 'namespace_rest': [
- include('common'),
- (_identifier, Name.Namespace),
- (r'\{', Punctuation, 'namespace_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'namespace_body': [
- include('common'),
- (r'\[', Punctuation, 'extended_attributes'),
- (r'readonly' + _keyword_suffix, Keyword),
- (r'attribute' + _keyword_suffix,
- Keyword, ('attribute_rest', 'type')),
- (r'const' + _keyword_suffix, Keyword, ('const_rest', 'type')),
- (r'\}', Punctuation, '#pop'),
- default(('operation_rest', 'type')),
- ],
- 'interface_rest': [
- include('common'),
- (_identifier, Name.Class),
- (r':', Punctuation),
- (r'\{', Punctuation, 'interface_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'interface_body': [
- (words(('iterable', 'maplike', 'setlike'), suffix=_keyword_suffix),
- Keyword, 'iterable_maplike_setlike_rest'),
- (words(('setter', 'getter', 'creator', 'deleter', 'legacycaller',
- 'inherit', 'static', 'stringifier', 'jsonifier'),
- suffix=_keyword_suffix), Keyword),
- (r'serializer' + _keyword_suffix, Keyword, 'serializer_rest'),
- (r';', Punctuation),
- include('namespace_body'),
- ],
- 'attribute_rest': [
- include('common'),
- (_identifier, Name.Variable),
- (r';', Punctuation, '#pop'),
- ],
- 'const_rest': [
- include('common'),
- (_identifier, Name.Constant),
- (r'=', Punctuation, 'const_value'),
- (r';', Punctuation, '#pop'),
- ],
- 'operation_rest': [
- include('common'),
- (r';', Punctuation, '#pop'),
- default('operation'),
- ],
- 'operation': [
- include('common'),
- (_identifier, Name.Function),
- (r'\(', Punctuation, 'argument_list'),
- (r';', Punctuation, '#pop:2'),
- ],
- 'iterable_maplike_setlike_rest': [
- include('common'),
- (r'<', Punctuation, 'type_list'),
- (r';', Punctuation, '#pop'),
- ],
- 'type_list': [
- include('common'),
- (r',', Punctuation),
- (r'>', Punctuation, '#pop'),
- default('type'),
- ],
- 'serializer_rest': [
- include('common'),
- (r'=', Punctuation, 'serialization_pattern'),
- (r';', Punctuation, '#pop'),
- default('operation'),
- ],
- 'serialization_pattern': [
- include('common'),
- (_identifier, Name.Variable, '#pop'),
- (r'\{', Punctuation, 'serialization_pattern_map'),
- (r'\[', Punctuation, 'serialization_pattern_list'),
- ],
- 'serialization_pattern_map': [
- include('common'),
- (words(('getter', 'inherit', 'attribute'),
- suffix=_keyword_suffix), Keyword),
- (r',', Punctuation),
- (_identifier, Name.Variable),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'serialization_pattern_list': [
- include('common'),
- (words(('getter', 'attribute'), suffix=_keyword_suffix), Keyword),
- (r',', Punctuation),
- (_identifier, Name.Variable),
- (r']', Punctuation, '#pop:2'),
- ],
- 'enum_rest': [
- include('common'),
- (_identifier, Name.Class),
- (r'\{', Punctuation, 'enum_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'enum_body': [
- include('common'),
- (_string, String),
- (r',', Punctuation),
- (r'\}', Punctuation, '#pop'),
- ],
- 'callback_rest': [
- include('common'),
- (r'interface' + _keyword_suffix,
- Keyword, ('#pop', 'interface_rest')),
- (_identifier, Name.Class),
- (r'=', Punctuation, ('operation', 'type')),
- (r';', Punctuation, '#pop'),
- ],
- 'dictionary_rest': [
- include('common'),
- (_identifier, Name.Class),
- (r':', Punctuation),
- (r'\{', Punctuation, 'dictionary_body'),
- (r';', Punctuation, '#pop'),
- ],
- 'dictionary_body': [
- include('common'),
- (r'\[', Punctuation, 'extended_attributes'),
- (r'required' + _keyword_suffix, Keyword),
- (r'\}', Punctuation, '#pop'),
- default(('dictionary_item', 'type')),
- ],
- 'dictionary_item': [
- include('common'),
- (_identifier, Name.Variable),
- (r'=', Punctuation, 'default_value'),
- (r';', Punctuation, '#pop'),
- ],
- 'implements_rest': [
- include('common'),
- (r'implements' + _keyword_suffix, Keyword),
- (_identifier, Name.Class),
- (r';', Punctuation, '#pop'),
- ],
- }
+"""
+ pygments.lexers.webidl
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Web IDL, including some extensions.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, default, include, words
+from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
+ String, Text
+
+__all__ = ['WebIDLLexer']
+
+_builtin_types = (
+ # primitive types
+ 'byte', 'octet', 'boolean',
+ r'(?:unsigned\s+)?(?:short|long(?:\s+long)?)',
+ r'(?:unrestricted\s+)?(?:float|double)',
+ # string types
+ 'DOMString', 'ByteString', 'USVString',
+ # exception types
+ 'Error', 'DOMException',
+ # typed array types
+ 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Uint8ClampedArray',
+ 'Float32Array', 'Float64Array',
+ # buffer source types
+ 'ArrayBuffer', 'DataView', 'Int8Array', 'Int16Array', 'Int32Array',
+ # other
+ 'any', 'void', 'object', 'RegExp',
+)
+_identifier = r'_?[A-Za-z][a-zA-Z0-9_-]*'
+_keyword_suffix = r'(?![\w-])'
+_string = r'"[^"]*"'
+
+
+class WebIDLLexer(RegexLexer):
+ """
+ For Web IDL.
+
+ .. versionadded:: 2.6
+ """
+
+ name = 'Web IDL'
+ aliases = ['webidl']
+ filenames = ['*.webidl']
+
+ tokens = {
+ 'common': [
+ (r'\s+', Text),
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ (r'//.*', Comment.Single),
+ (r'^#.*', Comment.Preproc),
+ ],
+ 'root': [
+ include('common'),
+ (r'\[', Punctuation, 'extended_attributes'),
+ (r'partial' + _keyword_suffix, Keyword),
+ (r'typedef' + _keyword_suffix, Keyword, ('typedef', 'type')),
+ (r'interface' + _keyword_suffix, Keyword, 'interface_rest'),
+ (r'enum' + _keyword_suffix, Keyword, 'enum_rest'),
+ (r'callback' + _keyword_suffix, Keyword, 'callback_rest'),
+ (r'dictionary' + _keyword_suffix, Keyword, 'dictionary_rest'),
+ (r'namespace' + _keyword_suffix, Keyword, 'namespace_rest'),
+ (_identifier, Name.Class, 'implements_rest'),
+ ],
+ 'extended_attributes': [
+ include('common'),
+ (r',', Punctuation),
+ (_identifier, Name.Decorator),
+ (r'=', Punctuation, 'extended_attribute_rest'),
+ (r'\(', Punctuation, 'argument_list'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+ 'extended_attribute_rest': [
+ include('common'),
+ (_identifier, Name, 'extended_attribute_named_rest'),
+ (_string, String),
+ (r'\(', Punctuation, 'identifier_list'),
+ default('#pop'),
+ ],
+ 'extended_attribute_named_rest': [
+ include('common'),
+ (r'\(', Punctuation, 'argument_list'),
+ default('#pop'),
+ ],
+ 'argument_list': [
+ include('common'),
+ (r'\)', Punctuation, '#pop'),
+ default('argument'),
+ ],
+ 'argument': [
+ include('common'),
+ (r'optional' + _keyword_suffix, Keyword),
+ (r'\[', Punctuation, 'extended_attributes'),
+ (r',', Punctuation, '#pop'),
+ (r'\)', Punctuation, '#pop:2'),
+ default(('argument_rest', 'type'))
+ ],
+ 'argument_rest': [
+ include('common'),
+ (_identifier, Name.Variable),
+ (r'\.\.\.', Punctuation),
+ (r'=', Punctuation, 'default_value'),
+ default('#pop'),
+ ],
+ 'identifier_list': [
+ include('common'),
+ (_identifier, Name.Class),
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'type': [
+ include('common'),
+ (r'(?:' + r'|'.join(_builtin_types) + r')' + _keyword_suffix,
+ Keyword.Type, 'type_null'),
+ (words(('sequence', 'Promise', 'FrozenArray'),
+ suffix=_keyword_suffix), Keyword.Type, 'type_identifier'),
+ (_identifier, Name.Class, 'type_identifier'),
+ (r'\(', Punctuation, 'union_type'),
+ ],
+ 'union_type': [
+ include('common'),
+ (r'or' + _keyword_suffix, Keyword),
+ (r'\)', Punctuation, ('#pop', 'type_null')),
+ default('type'),
+ ],
+ 'type_identifier': [
+ (r'<', Punctuation, 'type_list'),
+ default(('#pop', 'type_null'))
+ ],
+ 'type_null': [
+ (r'\?', Punctuation),
+ default('#pop:2'),
+ ],
+ 'default_value': [
+ include('common'),
+ include('const_value'),
+ (_string, String, '#pop'),
+ (r'\[\s*\]', Punctuation, '#pop'),
+ ],
+ 'const_value': [
+ include('common'),
+ (words(('true', 'false', '-Infinity', 'Infinity', 'NaN', 'null'),
+ suffix=_keyword_suffix), Keyword.Constant, '#pop'),
+ (r'-?(?:(?:[0-9]+\.[0-9]*|[0-9]*\.[0-9]+)(?:[Ee][+-]?[0-9]+)?' +
+ r'|[0-9]+[Ee][+-]?[0-9]+)', Number.Float, '#pop'),
+ (r'-?[1-9][0-9]*', Number.Integer, '#pop'),
+ (r'-?0[Xx][0-9A-Fa-f]+', Number.Hex, '#pop'),
+ (r'-?0[0-7]*', Number.Oct, '#pop'),
+ ],
+ 'typedef': [
+ include('common'),
+ (_identifier, Name.Class),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'namespace_rest': [
+ include('common'),
+ (_identifier, Name.Namespace),
+ (r'\{', Punctuation, 'namespace_body'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'namespace_body': [
+ include('common'),
+ (r'\[', Punctuation, 'extended_attributes'),
+ (r'readonly' + _keyword_suffix, Keyword),
+ (r'attribute' + _keyword_suffix,
+ Keyword, ('attribute_rest', 'type')),
+ (r'const' + _keyword_suffix, Keyword, ('const_rest', 'type')),
+ (r'\}', Punctuation, '#pop'),
+ default(('operation_rest', 'type')),
+ ],
+ 'interface_rest': [
+ include('common'),
+ (_identifier, Name.Class),
+ (r':', Punctuation),
+ (r'\{', Punctuation, 'interface_body'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'interface_body': [
+ (words(('iterable', 'maplike', 'setlike'), suffix=_keyword_suffix),
+ Keyword, 'iterable_maplike_setlike_rest'),
+ (words(('setter', 'getter', 'creator', 'deleter', 'legacycaller',
+ 'inherit', 'static', 'stringifier', 'jsonifier'),
+ suffix=_keyword_suffix), Keyword),
+ (r'serializer' + _keyword_suffix, Keyword, 'serializer_rest'),
+ (r';', Punctuation),
+ include('namespace_body'),
+ ],
+ 'attribute_rest': [
+ include('common'),
+ (_identifier, Name.Variable),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'const_rest': [
+ include('common'),
+ (_identifier, Name.Constant),
+ (r'=', Punctuation, 'const_value'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'operation_rest': [
+ include('common'),
+ (r';', Punctuation, '#pop'),
+ default('operation'),
+ ],
+ 'operation': [
+ include('common'),
+ (_identifier, Name.Function),
+ (r'\(', Punctuation, 'argument_list'),
+ (r';', Punctuation, '#pop:2'),
+ ],
+ 'iterable_maplike_setlike_rest': [
+ include('common'),
+ (r'<', Punctuation, 'type_list'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'type_list': [
+ include('common'),
+ (r',', Punctuation),
+ (r'>', Punctuation, '#pop'),
+ default('type'),
+ ],
+ 'serializer_rest': [
+ include('common'),
+ (r'=', Punctuation, 'serialization_pattern'),
+ (r';', Punctuation, '#pop'),
+ default('operation'),
+ ],
+ 'serialization_pattern': [
+ include('common'),
+ (_identifier, Name.Variable, '#pop'),
+ (r'\{', Punctuation, 'serialization_pattern_map'),
+ (r'\[', Punctuation, 'serialization_pattern_list'),
+ ],
+ 'serialization_pattern_map': [
+ include('common'),
+ (words(('getter', 'inherit', 'attribute'),
+ suffix=_keyword_suffix), Keyword),
+ (r',', Punctuation),
+ (_identifier, Name.Variable),
+ (r'\}', Punctuation, '#pop:2'),
+ ],
+ 'serialization_pattern_list': [
+ include('common'),
+ (words(('getter', 'attribute'), suffix=_keyword_suffix), Keyword),
+ (r',', Punctuation),
+ (_identifier, Name.Variable),
+ (r']', Punctuation, '#pop:2'),
+ ],
+ 'enum_rest': [
+ include('common'),
+ (_identifier, Name.Class),
+ (r'\{', Punctuation, 'enum_body'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'enum_body': [
+ include('common'),
+ (_string, String),
+ (r',', Punctuation),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'callback_rest': [
+ include('common'),
+ (r'interface' + _keyword_suffix,
+ Keyword, ('#pop', 'interface_rest')),
+ (_identifier, Name.Class),
+ (r'=', Punctuation, ('operation', 'type')),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'dictionary_rest': [
+ include('common'),
+ (_identifier, Name.Class),
+ (r':', Punctuation),
+ (r'\{', Punctuation, 'dictionary_body'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'dictionary_body': [
+ include('common'),
+ (r'\[', Punctuation, 'extended_attributes'),
+ (r'required' + _keyword_suffix, Keyword),
+ (r'\}', Punctuation, '#pop'),
+ default(('dictionary_item', 'type')),
+ ],
+ 'dictionary_item': [
+ include('common'),
+ (_identifier, Name.Variable),
+ (r'=', Punctuation, 'default_value'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'implements_rest': [
+ include('common'),
+ (r'implements' + _keyword_suffix, Keyword),
+ (_identifier, Name.Class),
+ (r';', Punctuation, '#pop'),
+ ],
+ }
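
The WebIDLLexer above can also be driven directly through the generic Lexer API. A minimal sketch, assuming a stock Pygments install; the IDL fragment is illustrative only.

# Inspect the raw token stream produced by WebIDLLexer.get_tokens().
from pygments.lexers import WebIDLLexer

idl_source = '''interface Window {
  readonly attribute DOMString name;
  void close();
};
'''

for token_type, value in WebIDLLexer().get_tokens(idl_source):
    if value.strip():            # skip whitespace-only tokens for readability
        print(token_type, repr(value))
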
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webmisc.py b/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
index b1fd455525..026552ebd2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
@@ -4,7 +4,7 @@
Lexers for misc. web stuff.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -72,15 +72,15 @@ class XQueryLexer(ExtendedRegexLexer):
# FIX UNICODE LATER
# ncnamestartchar = (
- # r"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
- # r"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
- # r"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
- # r"[\u10000-\uEFFFF]"
+ # r"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
+ # r"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
+ # r"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
+ # r"[\u10000-\uEFFFF]"
# )
ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
# FIX UNICODE LATER
- # ncnamechar = ncnamestartchar + (r"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
- # r"[\u203F-\u2040]")
+ # ncnamechar = ncnamestartchar + (r"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
+ # r"[\u203F-\u2040]")
ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
@@ -97,14 +97,14 @@ class XQueryLexer(ExtendedRegexLexer):
stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
# FIX UNICODE LATER
- # elementcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ # elementcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
- # quotattrcontentchar = (r'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
- # r'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ # quotattrcontentchar = (r'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
+ # r'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
- # aposattrcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ # aposattrcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
# CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
@@ -127,8 +127,8 @@ class XQueryLexer(ExtendedRegexLexer):
def popstate_tag_callback(lexer, match, ctx):
yield match.start(), Name.Tag, match.group(1)
- if lexer.xquery_parse_state:
- ctx.stack.append(lexer.xquery_parse_state.pop())
+ if lexer.xquery_parse_state:
+ ctx.stack.append(lexer.xquery_parse_state.pop())
ctx.pos = match.end()
def popstate_xmlcomment_callback(lexer, match, ctx):
@@ -157,9 +157,9 @@ class XQueryLexer(ExtendedRegexLexer):
# state stack
if len(lexer.xquery_parse_state) == 0:
ctx.stack.pop()
- if not ctx.stack:
- # make sure we have at least the root state on invalid inputs
- ctx.stack = ['root']
+ if not ctx.stack:
+ # make sure we have at least the root state on invalid inputs
+ ctx.stack = ['root']
elif len(ctx.stack) > 1:
ctx.stack.append(lexer.xquery_parse_state.pop())
else:
@@ -517,8 +517,8 @@ class XQueryLexer(ExtendedRegexLexer):
'xml_comment': [
(r'(-->)', popstate_xmlcomment_callback),
(r'[^-]{1,2}', Literal),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
+ (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
+ Literal),
],
'processing_instruction': [
(r'\s+', Text, 'processing_instruction_content'),
@@ -527,13 +527,13 @@ class XQueryLexer(ExtendedRegexLexer):
],
'processing_instruction_content': [
(r'\?>', String.Doc, '#pop'),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
+ (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
+ Literal),
],
'cdata_section': [
(r']]>', String.Doc, '#pop'),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
+ (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
+ Literal),
],
'start_tag': [
include('whitespace'),
@@ -602,8 +602,8 @@ class XQueryLexer(ExtendedRegexLexer):
],
'pragmacontents': [
(r'#\)', Punctuation, 'operator'),
- (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
- Literal),
+ (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
+ Literal),
(r'(\s+)', Text),
],
'occurrenceindicator': [
@@ -857,8 +857,8 @@ class QmlLexer(RegexLexer):
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
]
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/whiley.py b/contrib/python/Pygments/py3/pygments/lexers/whiley.py
index 82b100bc45..b42f3860a8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/whiley.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/whiley.py
@@ -4,7 +4,7 @@
Lexers for the Whiley language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -96,11 +96,11 @@ class WhileyLexer(RegexLexer):
# operators and punctuation
(r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?'
+ (r'[+\-*/%&|<>^!~@=:?'
# unicode operators
- r'\u2200\u2203\u2205\u2282\u2286\u2283\u2287'
- r'\u222A\u2229\u2264\u2265\u2208\u2227\u2228'
- r']', Operator),
+ r'\u2200\u2203\u2205\u2282\u2286\u2283\u2287'
+ r'\u222A\u2229\u2264\u2265\u2208\u2227\u2228'
+ r']', Operator),
# identifier
(r'[a-zA-Z_]\w*', Name),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/x10.py b/contrib/python/Pygments/py3/pygments/lexers/x10.py
index a5aff36d49..ed97f0b626 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/x10.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/x10.py
@@ -4,7 +4,7 @@
Lexers for the X10 programming language.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -58,7 +58,7 @@ class X10Lexer(RegexLexer):
(r'\b(%s)\b' % '|'.join(types), Keyword.Type),
(r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
(r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'.', Text)
],
diff --git a/contrib/python/Pygments/py3/pygments/lexers/xorg.py b/contrib/python/Pygments/py3/pygments/lexers/xorg.py
index 490b7c1d18..b8aeb20635 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/xorg.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/xorg.py
@@ -4,7 +4,7 @@
Lexers for Xorg configs.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -28,7 +28,7 @@ class XorgLexer(RegexLexer):
(r'((?:Sub)?Section)(\s+)("\w+")',
bygroups(String.Escape, Text, String.Escape)),
- (r'(End(?:Sub)?Section)', String.Escape),
+ (r'(End(?:Sub)?Section)', String.Escape),
(r'(\w+)(\s+)([^\n#]+)',
bygroups(Name.Builtin, Text, Name.Constant)),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/yang.py b/contrib/python/Pygments/py3/pygments/lexers/yang.py
index 4452df2d56..4397a95525 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/yang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/yang.py
@@ -1,103 +1,103 @@
-"""
- pygments.lexers.yang
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the YANG 1.1 modeling language. See :rfc:`7950`.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import (RegexLexer, bygroups, words)
-from pygments.token import (Text, Token, Name, String, Comment,
- Number)
-
-__all__ = ['YangLexer']
-
-class YangLexer(RegexLexer):
- """
- Lexer for `YANG <https://tools.ietf.org/html/rfc7950/>`_, based on RFC7950
-
- .. versionadded:: 2.7
- """
- name = 'YANG'
- aliases = ['yang']
- filenames = ['*.yang']
- mimetypes = ['application/yang']
-
- #Keywords from RFC7950 ; oriented at BNF style
- TOP_STMTS_KEYWORDS = ("module", "submodule")
- MODULE_HEADER_STMT_KEYWORDS = ("belongs-to", "namespace", "prefix", "yang-version")
- META_STMT_KEYWORDS = ("contact", "description", "organization",
- "reference", "revision")
- LINKAGE_STMTS_KEYWORDS = ("import", "include", "revision-date")
- BODY_STMT_KEYWORDS = ("action", "argument", "augment", "deviation",
- "extension", "feature", "grouping", "identity",
- "if-feature", "input", "notification", "output",
- "rpc", "typedef")
- DATA_DEF_STMT_KEYWORDS = ("anydata", "anyxml", "case", "choice",
- "config", "container", "deviate", "leaf",
- "leaf-list", "list", "must", "presence",
- "refine", "uses", "when")
- TYPE_STMT_KEYWORDS = ("base", "bit", "default", "enum", "error-app-tag",
- "error-message", "fraction-digits", "length",
- "max-elements", "min-elements", "modifier",
- "ordered-by", "path", "pattern", "position",
- "range", "require-instance", "status", "type",
- "units", "value", "yin-element")
- LIST_STMT_KEYWORDS = ("key", "mandatory", "unique")
-
- #RFC7950 other keywords
- CONSTANTS_KEYWORDS = ("add", "current", "delete", "deprecated", "false",
- "invert-match", "max", "min", "not-supported",
- "obsolete", "replace", "true", "unbounded", "user")
-
- #RFC7950 Built-In Types
- TYPES = ("binary", "bits", "boolean", "decimal64", "empty", "enumeration",
- "identityref", "instance-identifier", "int16", "int32", "int64",
- "int8", "leafref", "string", "uint16", "uint32", "uint64",
- "uint8", "union")
-
- suffix_re_pattern = r'(?=[^\w\-:])'
-
- tokens = {
- 'comments': [
- (r'[^*/]', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[*/]', Comment),
- ],
- "root": [
- (r'\s+', Text.Whitespace),
- (r'[{};]+', Token.Punctuation),
- (r'(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])', Token.Operator),
-
- (r'"(?:\\"|[^"])*?"', String.Double),
- (r"'(?:\\'|[^'])*?'", String.Single),
-
- (r'/\*', Comment, 'comments'),
- (r'//.*?$', Comment),
-
- #match BNF stmt for `node-identifier` with [ prefix ":"]
- (r'(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])',
- bygroups(Name.Namespace, Token.Punctuation, Name.Variable)),
-
- #match BNF stmt `date-arg-str`
- (r'([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s{};])', Name.Label),
- (r'([0-9]+\.[0-9]+)(?=[\s{};])', Number.Float),
- (r'([0-9]+)(?=[\s{};])', Number.Integer),
-
- (words(TOP_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(MODULE_HEADER_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(META_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(LINKAGE_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(BODY_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(DATA_DEF_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(TYPE_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(LIST_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
- (words(TYPES, suffix=suffix_re_pattern), Name.Class),
- (words(CONSTANTS_KEYWORDS, suffix=suffix_re_pattern), Name.Class),
-
- (r'[^;{}\s\'"]+', Name.Variable),
- ]
- }
+"""
+ pygments.lexers.yang
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the YANG 1.1 modeling language. See :rfc:`7950`.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import (RegexLexer, bygroups, words)
+from pygments.token import (Text, Token, Name, String, Comment,
+ Number)
+
+__all__ = ['YangLexer']
+
+class YangLexer(RegexLexer):
+ """
+    Lexer for `YANG <https://tools.ietf.org/html/rfc7950/>`_, based on RFC 7950.
+
+ .. versionadded:: 2.7
+ """
+ name = 'YANG'
+ aliases = ['yang']
+ filenames = ['*.yang']
+ mimetypes = ['application/yang']
+
+ #Keywords from RFC7950 ; oriented at BNF style
+ TOP_STMTS_KEYWORDS = ("module", "submodule")
+ MODULE_HEADER_STMT_KEYWORDS = ("belongs-to", "namespace", "prefix", "yang-version")
+ META_STMT_KEYWORDS = ("contact", "description", "organization",
+ "reference", "revision")
+ LINKAGE_STMTS_KEYWORDS = ("import", "include", "revision-date")
+ BODY_STMT_KEYWORDS = ("action", "argument", "augment", "deviation",
+ "extension", "feature", "grouping", "identity",
+ "if-feature", "input", "notification", "output",
+ "rpc", "typedef")
+ DATA_DEF_STMT_KEYWORDS = ("anydata", "anyxml", "case", "choice",
+ "config", "container", "deviate", "leaf",
+ "leaf-list", "list", "must", "presence",
+ "refine", "uses", "when")
+ TYPE_STMT_KEYWORDS = ("base", "bit", "default", "enum", "error-app-tag",
+ "error-message", "fraction-digits", "length",
+ "max-elements", "min-elements", "modifier",
+ "ordered-by", "path", "pattern", "position",
+ "range", "require-instance", "status", "type",
+ "units", "value", "yin-element")
+ LIST_STMT_KEYWORDS = ("key", "mandatory", "unique")
+
+ #RFC7950 other keywords
+ CONSTANTS_KEYWORDS = ("add", "current", "delete", "deprecated", "false",
+ "invert-match", "max", "min", "not-supported",
+ "obsolete", "replace", "true", "unbounded", "user")
+
+ #RFC7950 Built-In Types
+ TYPES = ("binary", "bits", "boolean", "decimal64", "empty", "enumeration",
+ "identityref", "instance-identifier", "int16", "int32", "int64",
+ "int8", "leafref", "string", "uint16", "uint32", "uint64",
+ "uint8", "union")
+
+ suffix_re_pattern = r'(?=[^\w\-:])'
+
+ tokens = {
+ 'comments': [
+ (r'[^*/]', Comment),
+ (r'/\*', Comment, '#push'),
+ (r'\*/', Comment, '#pop'),
+ (r'[*/]', Comment),
+ ],
+ "root": [
+ (r'\s+', Text.Whitespace),
+ (r'[{};]+', Token.Punctuation),
+ (r'(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])', Token.Operator),
+
+ (r'"(?:\\"|[^"])*?"', String.Double),
+ (r"'(?:\\'|[^'])*?'", String.Single),
+
+ (r'/\*', Comment, 'comments'),
+ (r'//.*?$', Comment),
+
+ #match BNF stmt for `node-identifier` with [ prefix ":"]
+ (r'(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])',
+ bygroups(Name.Namespace, Token.Punctuation, Name.Variable)),
+
+ #match BNF stmt `date-arg-str`
+ (r'([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s{};])', Name.Label),
+ (r'([0-9]+\.[0-9]+)(?=[\s{};])', Number.Float),
+ (r'([0-9]+)(?=[\s{};])', Number.Integer),
+
+ (words(TOP_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(MODULE_HEADER_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(META_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(LINKAGE_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(BODY_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(DATA_DEF_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(TYPE_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(LIST_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
+ (words(TYPES, suffix=suffix_re_pattern), Name.Class),
+ (words(CONSTANTS_KEYWORDS, suffix=suffix_re_pattern), Name.Class),
+
+ (r'[^;{}\s\'"]+', Name.Variable),
+ ]
+ }
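
For the YangLexer restored above, a minimal rendering sketch; the module text, output file name, and HtmlFormatter options are assumptions for illustration, not part of the patch.

# Render a small YANG module to a standalone HTML page.
from pygments import highlight
from pygments.lexers import YangLexer
from pygments.formatters import HtmlFormatter

yang_source = '''module example {
  namespace "urn:example";
  prefix ex;
  leaf hostname {
    type string;
    description "Device host name.";
  }
}
'''

html = highlight(yang_source, YangLexer(), HtmlFormatter(full=True))
with open('example-yang.html', 'w') as fp:
    fp.write(html)
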
diff --git a/contrib/python/Pygments/py3/pygments/lexers/zig.py b/contrib/python/Pygments/py3/pygments/lexers/zig.py
index 4a36832be5..f080f35807 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/zig.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/zig.py
@@ -1,123 +1,123 @@
-"""
- pygments.lexers.zig
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Zig.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['ZigLexer']
-
-
-class ZigLexer(RegexLexer):
- """
- For `Zig <http://www.ziglang.org>`_ source code.
-
- grammar: https://ziglang.org/documentation/master/#Grammar
- """
- name = 'Zig'
- aliases = ['zig']
- filenames = ['*.zig']
- mimetypes = ['text/zig']
-
- type_keywords = (
- words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
- 'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize', 'comptime_int',
- 'comptime_float', 'c_short', 'c_ushort', 'c_int', 'c_uint', 'c_long',
-               'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', 'c_void',
- 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128',
- 'u128'), suffix=r'\b'),
- Keyword.Type)
-
- storage_keywords = (
- words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias',
- 'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero',
- 'align', 'linksection', 'threadlocal'), suffix=r'\b'),
- Keyword.Reserved)
-
- structure_keywords = (
- words(('struct', 'enum', 'union', 'error'), suffix=r'\b'),
- Keyword)
-
- statement_keywords = (
- words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer',
- 'unreachable', 'try', 'catch', 'async', 'await', 'suspend',
- 'resume', 'cancel'), suffix=r'\b'),
- Keyword)
-
- conditional_keywords = (
- words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'),
- Keyword)
-
- repeat_keywords = (
- words(('while', 'for'), suffix=r'\b'),
- Keyword)
-
- other_keywords = (
- words(('fn', 'usingnamespace', 'test'), suffix=r'\b'),
- Keyword)
-
- constant_keywords = (
- words(('true', 'false', 'null', 'undefined'), suffix=r'\b'),
- Keyword.Constant)
-
- tokens = {
- 'root': [
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
-
- # Keywords
- statement_keywords,
- storage_keywords,
- structure_keywords,
- repeat_keywords,
- type_keywords,
- constant_keywords,
- conditional_keywords,
- other_keywords,
-
- # Floats
- (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float),
- (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float),
- (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float),
- (r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float),
-
- # Integers
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # Identifier
- (r'@[a-zA-Z_]\w*', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
-
- # Characters
- (r'\'\\\'\'', String.Escape),
- (r'\'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
- String.Escape),
- (r'\'[^\\\']\'', String),
-
- # Strings
- (r'\\\\[^\n]*', String.Heredoc),
- (r'c\\\\[^\n]*', String.Heredoc),
- (r'c?"', String, 'string'),
-
- # Operators, Punctuation
- (r'[+%=><|^!?/\-*&~:]', Operator),
- (r'[{}()\[\],.;]', Punctuation)
- ],
- 'string': [
- (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])',
- String.Escape),
- (r'[^\\"\n]+', String),
- (r'"', String, '#pop')
- ]
- }
+"""
+ pygments.lexers.zig
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Zig.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['ZigLexer']
+
+
+class ZigLexer(RegexLexer):
+ """
+ For `Zig <http://www.ziglang.org>`_ source code.
+
+ grammar: https://ziglang.org/documentation/master/#Grammar
+ """
+ name = 'Zig'
+ aliases = ['zig']
+ filenames = ['*.zig']
+ mimetypes = ['text/zig']
+
+ type_keywords = (
+ words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
+ 'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize', 'comptime_int',
+ 'comptime_float', 'c_short', 'c_ushort', 'c_int', 'c_uint', 'c_long',
+               'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', 'c_void',
+ 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128',
+ 'u128'), suffix=r'\b'),
+ Keyword.Type)
+
+ storage_keywords = (
+ words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias',
+ 'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero',
+ 'align', 'linksection', 'threadlocal'), suffix=r'\b'),
+ Keyword.Reserved)
+
+ structure_keywords = (
+ words(('struct', 'enum', 'union', 'error'), suffix=r'\b'),
+ Keyword)
+
+ statement_keywords = (
+ words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer',
+ 'unreachable', 'try', 'catch', 'async', 'await', 'suspend',
+ 'resume', 'cancel'), suffix=r'\b'),
+ Keyword)
+
+ conditional_keywords = (
+ words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'),
+ Keyword)
+
+ repeat_keywords = (
+ words(('while', 'for'), suffix=r'\b'),
+ Keyword)
+
+ other_keywords = (
+ words(('fn', 'usingnamespace', 'test'), suffix=r'\b'),
+ Keyword)
+
+ constant_keywords = (
+ words(('true', 'false', 'null', 'undefined'), suffix=r'\b'),
+ Keyword.Constant)
+
+ tokens = {
+ 'root': [
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'//.*?\n', Comment.Single),
+
+ # Keywords
+ statement_keywords,
+ storage_keywords,
+ structure_keywords,
+ repeat_keywords,
+ type_keywords,
+ constant_keywords,
+ conditional_keywords,
+ other_keywords,
+
+ # Floats
+ (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float),
+ (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float),
+ (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float),
+ (r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float),
+
+ # Integers
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # Identifier
+ (r'@[a-zA-Z_]\w*', Name.Builtin),
+ (r'[a-zA-Z_]\w*', Name),
+
+ # Characters
+ (r'\'\\\'\'', String.Escape),
+ (r'\'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
+ String.Escape),
+ (r'\'[^\\\']\'', String),
+
+ # Strings
+ (r'\\\\[^\n]*', String.Heredoc),
+ (r'c\\\\[^\n]*', String.Heredoc),
+ (r'c?"', String, 'string'),
+
+ # Operators, Punctuation
+ (r'[+%=><|^!?/\-*&~:]', Operator),
+ (r'[{}()\[\],.;]', Punctuation)
+ ],
+ 'string': [
+ (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])',
+ String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'"', String, '#pop')
+ ]
+ }
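
Because the ZigLexer registers the '*.zig' filename pattern, it can be resolved without naming the class. A minimal sketch (stock Pygments assumed; the Zig snippet is illustrative only).

# Resolve the lexer from a file name and highlight a small Zig program.
from pygments import highlight
from pygments.lexers import get_lexer_for_filename
from pygments.formatters import TerminalFormatter

zig_source = '''const std = @import("std");

pub fn main() void {
    std.debug.print("hello from zig\\n", .{});
}
'''

lexer = get_lexer_for_filename('main.zig')   # resolves to ZigLexer via *.zig
print(highlight(zig_source, lexer, TerminalFormatter()))
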
diff --git a/contrib/python/Pygments/py3/pygments/modeline.py b/contrib/python/Pygments/py3/pygments/modeline.py
index 047d86d6be..fff1fb9603 100644
--- a/contrib/python/Pygments/py3/pygments/modeline.py
+++ b/contrib/python/Pygments/py3/pygments/modeline.py
@@ -4,7 +4,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/plugin.py b/contrib/python/Pygments/py3/pygments/plugin.py
index b1085b7ae4..4e3c1c814d 100644
--- a/contrib/python/Pygments/py3/pygments/plugin.py
+++ b/contrib/python/Pygments/py3/pygments/plugin.py
@@ -31,7 +31,7 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
LEXER_ENTRY_POINT = 'pygments.lexers'
@@ -43,7 +43,7 @@ FILTER_ENTRY_POINT = 'pygments.filters'
def iter_entry_points(group_name):
try:
import pkg_resources
- except (ImportError, OSError):
+ except (ImportError, OSError):
return []
return pkg_resources.iter_entry_points(group_name)
diff --git a/contrib/python/Pygments/py3/pygments/regexopt.py b/contrib/python/Pygments/py3/pygments/regexopt.py
index cb2c8e21a9..885e270160 100644
--- a/contrib/python/Pygments/py3/pygments/regexopt.py
+++ b/contrib/python/Pygments/py3/pygments/regexopt.py
@@ -5,7 +5,7 @@
An algorithm that generates optimized regexes for matching long lists of
literal strings.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -15,7 +15,7 @@ from os.path import commonprefix
from itertools import groupby
from operator import itemgetter
-CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
+CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
FIRST_ELEMENT = itemgetter(0)
diff --git a/contrib/python/Pygments/py3/pygments/scanner.py b/contrib/python/Pygments/py3/pygments/scanner.py
index 5f32a22c3c..2baae184b0 100644
--- a/contrib/python/Pygments/py3/pygments/scanner.py
+++ b/contrib/python/Pygments/py3/pygments/scanner.py
@@ -11,7 +11,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -24,7 +24,7 @@ class EndOfText(RuntimeError):
"""
-class Scanner:
+class Scanner:
"""
Simple scanner
diff --git a/contrib/python/Pygments/py3/pygments/sphinxext.py b/contrib/python/Pygments/py3/pygments/sphinxext.py
index 644eb8c338..8770e7928c 100644
--- a/contrib/python/Pygments/py3/pygments/sphinxext.py
+++ b/contrib/python/Pygments/py3/pygments/sphinxext.py
@@ -5,7 +5,7 @@
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/style.py b/contrib/python/Pygments/py3/pygments/style.py
index 88ccaf263c..8ec58861b6 100644
--- a/contrib/python/Pygments/py3/pygments/style.py
+++ b/contrib/python/Pygments/py3/pygments/style.py
@@ -4,7 +4,7 @@
Basic style object.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -134,7 +134,7 @@ class StyleMeta(type):
color = _ansimap[color]
bgcolor = t[4]
if bgcolor in _deprecated_ansicolors:
- bgcolor = _deprecated_ansicolors[bgcolor]
+ bgcolor = _deprecated_ansicolors[bgcolor]
if bgcolor in ansicolors:
bgansicolor = bgcolor
bgcolor = _ansimap[bgcolor]
@@ -167,7 +167,7 @@ class StyleMeta(type):
return len(cls._styles)
-class Style(metaclass=StyleMeta):
+class Style(metaclass=StyleMeta):
#: overall background color (``None`` means transparent)
background_color = '#ffffff'
@@ -175,18 +175,18 @@ class Style(metaclass=StyleMeta):
#: highlight background color
highlight_color = '#ffffcc'
- #: line number font color
- line_number_color = 'inherit'
-
- #: line number background color
- line_number_background_color = 'transparent'
-
- #: special line number font color
- line_number_special_color = '#000000'
-
- #: special line number background color
- line_number_special_background_color = '#ffffc0'
-
+ #: line number font color
+ line_number_color = 'inherit'
+
+ #: line number background color
+ line_number_background_color = 'transparent'
+
+ #: special line number font color
+ line_number_special_color = '#000000'
+
+ #: special line number background color
+ line_number_special_background_color = '#ffffc0'
+
#: Style definitions for individual token types.
styles = {}
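
The line-number attributes restored on the base Style above are plain class attributes, so downstream styles override them by subclassing. A minimal sketch; the class name and color values are made up for illustration.

# A custom style that overrides the line-number colors defined on Style.
from pygments.style import Style
from pygments.token import Comment, Keyword

class DemoStyle(Style):
    background_color = '#ffffff'
    line_number_color = '#888888'             # overrides the 'inherit' default
    line_number_background_color = '#f0f0f0'
    styles = {
        Keyword: 'bold #005500',
        Comment: 'italic #808080',
    }
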
diff --git a/contrib/python/Pygments/py3/pygments/styles/__init__.py b/contrib/python/Pygments/py3/pygments/styles/__init__.py
index d75de1a521..572c7b75c2 100644
--- a/contrib/python/Pygments/py3/pygments/styles/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/styles/__init__.py
@@ -4,7 +4,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,7 +22,7 @@ STYLE_MAP = {
'autumn': 'autumn::AutumnStyle',
'murphy': 'murphy::MurphyStyle',
'manni': 'manni::ManniStyle',
- 'material': 'material::MaterialStyle',
+ 'material': 'material::MaterialStyle',
'monokai': 'monokai::MonokaiStyle',
'perldoc': 'perldoc::PerldocStyle',
'pastie': 'pastie::PastieStyle',
@@ -51,10 +51,10 @@ STYLE_MAP = {
'stata': 'stata_light::StataLightStyle',
'stata-light': 'stata_light::StataLightStyle',
'stata-dark': 'stata_dark::StataDarkStyle',
- 'inkpot': 'inkpot::InkPotStyle',
- 'zenburn': 'zenburn::ZenburnStyle',
- 'gruvbox-dark': 'gruvbox::GruvboxDarkStyle',
- 'gruvbox-light': 'gruvbox::GruvboxLightStyle',
+ 'inkpot': 'inkpot::InkPotStyle',
+ 'zenburn': 'zenburn::ZenburnStyle',
+ 'gruvbox-dark': 'gruvbox::GruvboxDarkStyle',
+ 'gruvbox-light': 'gruvbox::GruvboxLightStyle',
'dracula': 'dracula::DraculaStyle',
'one-dark': 'onedark::OneDarkStyle',
'lilypond' : 'lilypond::LilyPondStyle',
@@ -86,8 +86,8 @@ def get_style_by_name(name):
def get_all_styles():
- """Return a generator for all styles by name,
+ """Return a generator for all styles by name,
both builtin and plugin."""
- yield from STYLE_MAP
+ yield from STYLE_MAP
for name, _ in find_plugin_styles():
yield name
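
The STYLE_MAP entries and get_all_styles() touched above are reachable through the public pygments.styles API. A minimal sketch; the chosen style name and CSS selector are illustrative.

# List registered style names and fetch one of the restored entries by name.
from pygments.styles import get_all_styles, get_style_by_name
from pygments.formatters import HtmlFormatter

print(sorted(get_all_styles()))              # built-in names plus any plugins
style = get_style_by_name('gruvbox-dark')    # one of the entries listed above
print(HtmlFormatter(style=style).get_style_defs('.highlight'))
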
diff --git a/contrib/python/Pygments/py3/pygments/styles/abap.py b/contrib/python/Pygments/py3/pygments/styles/abap.py
index 1e51e05021..3aa5de67f2 100644
--- a/contrib/python/Pygments/py3/pygments/styles/abap.py
+++ b/contrib/python/Pygments/py3/pygments/styles/abap.py
@@ -4,7 +4,7 @@
ABAP workbench like style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/algol.py b/contrib/python/Pygments/py3/pygments/styles/algol.py
index f293b0eca9..992596b8e6 100644
--- a/contrib/python/Pygments/py3/pygments/styles/algol.py
+++ b/contrib/python/Pygments/py3/pygments/styles/algol.py
@@ -25,7 +25,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/algol_nu.py b/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
index fe2846693b..a9ea298a17 100644
--- a/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
+++ b/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
@@ -25,7 +25,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/arduino.py b/contrib/python/Pygments/py3/pygments/styles/arduino.py
index 4dfe0f3ce2..301535c09b 100644
--- a/contrib/python/Pygments/py3/pygments/styles/arduino.py
+++ b/contrib/python/Pygments/py3/pygments/styles/arduino.py
@@ -4,7 +4,7 @@
Arduino® Syntax highlighting style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,7 +14,7 @@ from pygments.token import Keyword, Name, Comment, String, Error, \
class ArduinoStyle(Style):
- """
+ """
The Arduino® language style. This style is designed to highlight the
     Arduino source code, so expect the best results with it.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/autumn.py b/contrib/python/Pygments/py3/pygments/styles/autumn.py
index 85fd8982a9..f8651b7392 100644
--- a/contrib/python/Pygments/py3/pygments/styles/autumn.py
+++ b/contrib/python/Pygments/py3/pygments/styles/autumn.py
@@ -4,7 +4,7 @@
A colorful style, inspired by the terminal highlighting style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/borland.py b/contrib/python/Pygments/py3/pygments/styles/borland.py
index 427e149f2f..e96e4d28b1 100644
--- a/contrib/python/Pygments/py3/pygments/styles/borland.py
+++ b/contrib/python/Pygments/py3/pygments/styles/borland.py
@@ -4,7 +4,7 @@
Style similar to the style used in the Borland IDEs.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/bw.py b/contrib/python/Pygments/py3/pygments/styles/bw.py
index 1b38538761..79604b8e3d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/bw.py
+++ b/contrib/python/Pygments/py3/pygments/styles/bw.py
@@ -4,7 +4,7 @@
Simple black/white only style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/colorful.py b/contrib/python/Pygments/py3/pygments/styles/colorful.py
index a67b8e3e61..1cf81b7850 100644
--- a/contrib/python/Pygments/py3/pygments/styles/colorful.py
+++ b/contrib/python/Pygments/py3/pygments/styles/colorful.py
@@ -4,7 +4,7 @@
A colorful style, inspired by CodeRay.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/default.py b/contrib/python/Pygments/py3/pygments/styles/default.py
index c69325f432..6b21ff6f29 100644
--- a/contrib/python/Pygments/py3/pygments/styles/default.py
+++ b/contrib/python/Pygments/py3/pygments/styles/default.py
@@ -4,7 +4,7 @@
The default highlighting style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/emacs.py b/contrib/python/Pygments/py3/pygments/styles/emacs.py
index e9cbc33da8..e93d75f09f 100644
--- a/contrib/python/Pygments/py3/pygments/styles/emacs.py
+++ b/contrib/python/Pygments/py3/pygments/styles/emacs.py
@@ -4,7 +4,7 @@
A highlighting style for Pygments, inspired by Emacs.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/friendly.py b/contrib/python/Pygments/py3/pygments/styles/friendly.py
index 82f2479475..9eaf496a81 100644
--- a/contrib/python/Pygments/py3/pygments/styles/friendly.py
+++ b/contrib/python/Pygments/py3/pygments/styles/friendly.py
@@ -4,7 +4,7 @@
A modern style based on the VIM pyte theme.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ class FriendlyStyle(Style):
background_color = "#f0f0f0"
default_style = ""
- line_number_color = "#666666"
+ line_number_color = "#666666"
styles = {
Whitespace: "#bbbbbb",
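The `line_number_color` attribute restored in the FriendlyStyle hunk only matters to formatters that actually draw line numbers. A minimal sketch of exercising it, assuming the vendored py3 tree is importable as a regular `pygments` package and that this Pygments version derives its line-number styling from these class attributes:

    # Sketch: render with line numbers so FriendlyStyle's line_number_color is consulted.
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter
    from pygments.styles.friendly import FriendlyStyle

    html = highlight("x = 1\n", PythonLexer(),
                     HtmlFormatter(style=FriendlyStyle, linenos=True))
    print(html[:200])  # generated markup includes the line-number table and CSS hooks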
diff --git a/contrib/python/Pygments/py3/pygments/styles/fruity.py b/contrib/python/Pygments/py3/pygments/styles/fruity.py
index 2446915d66..ba5ec09047 100644
--- a/contrib/python/Pygments/py3/pygments/styles/fruity.py
+++ b/contrib/python/Pygments/py3/pygments/styles/fruity.py
@@ -4,7 +4,7 @@
pygments version of my "fruity" vim theme.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/gruvbox.py b/contrib/python/Pygments/py3/pygments/styles/gruvbox.py
index 1077e6d86f..c2e59a2a2a 100644
--- a/contrib/python/Pygments/py3/pygments/styles/gruvbox.py
+++ b/contrib/python/Pygments/py3/pygments/styles/gruvbox.py
@@ -1,109 +1,109 @@
-"""
- pygments.styles.gruvbox
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of the "gruvbox" vim theme.
- https://github.com/morhetz/gruvbox
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
+"""
+ pygments.styles.gruvbox
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ pygments version of the "gruvbox" vim theme.
+ https://github.com/morhetz/gruvbox
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
from pygments.token import Token, Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic
-
-
-class GruvboxDarkStyle(Style):
- """
- Pygments version of the "gruvbox" dark vim theme.
- """
-
- background_color = '#282828'
- highlight_color = '#ebdbb2'
-
- styles = {
+ Number, Operator, Generic
+
+
+class GruvboxDarkStyle(Style):
+ """
+ Pygments version of the "gruvbox" dark vim theme.
+ """
+
+ background_color = '#282828'
+ highlight_color = '#ebdbb2'
+
+ styles = {
Token: '#dddddd',
- Comment: 'italic #928374',
- Comment.PreProc: '#8ec07c',
- Comment.Special: 'bold italic #ebdbb2',
-
- Keyword: '#fb4934',
- Operator.Word: '#fb4934',
-
- String: '#b8bb26',
- String.Escape: '#fe8019',
-
- Number: '#d3869b',
-
- Name.Builtin: '#fe8019',
- Name.Variable: '#83a598',
- Name.Constant: '#d3869b',
- Name.Class: '#8ec07c',
- Name.Function: '#8ec07c',
- Name.Namespace: '#8ec07c',
- Name.Exception: '#fb4934',
- Name.Tag: '#8ec07c',
- Name.Attribute: '#fabd2f',
- Name.Decorator: '#fb4934',
-
- Generic.Heading: 'bold #ebdbb2',
- Generic.Subheading: 'underline #ebdbb2',
- Generic.Deleted: 'bg:#fb4934 #282828',
- Generic.Inserted: 'bg:#b8bb26 #282828',
- Generic.Error: '#fb4934',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#a89984',
- Generic.Output: '#f2e5bc',
- Generic.Traceback: '#fb4934',
-
- Error: 'bg:#fb4934 #282828'
- }
-
-class GruvboxLightStyle(Style):
- """
- Pygments version of the "gruvbox" Light vim theme.
- """
-
- background_color = '#fbf1c7'
- highlight_color = '#3c3836'
-
- styles = {
- Comment: 'italic #928374',
- Comment.PreProc: '#427b58',
- Comment.Special: 'bold italic #3c3836',
-
- Keyword: '#9d0006',
- Operator.Word: '#9d0006',
-
- String: '#79740e',
- String.Escape: '#af3a03',
-
- Number: '#8f3f71',
-
- Name.Builtin: '#af3a03',
- Name.Variable: '#076678',
- Name.Constant: '#8f3f71',
- Name.Class: '#427b58',
- Name.Function: '#427b58',
- Name.Namespace: '#427b58',
- Name.Exception: '#9d0006',
- Name.Tag: '#427b58',
- Name.Attribute: '#b57614',
- Name.Decorator: '#9d0006',
-
- Generic.Heading: 'bold #3c3836',
- Generic.Subheading: 'underline #3c3836',
- Generic.Deleted: 'bg:#9d0006 #fbf1c7',
- Generic.Inserted: 'bg:#79740e #fbf1c7',
- Generic.Error: '#9d0006',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#7c6f64',
- Generic.Output: '#32302f',
- Generic.Traceback: '#9d0006',
-
- Error: 'bg:#9d0006 #fbf1c7'
- }
+ Comment: 'italic #928374',
+ Comment.PreProc: '#8ec07c',
+ Comment.Special: 'bold italic #ebdbb2',
+
+ Keyword: '#fb4934',
+ Operator.Word: '#fb4934',
+
+ String: '#b8bb26',
+ String.Escape: '#fe8019',
+
+ Number: '#d3869b',
+
+ Name.Builtin: '#fe8019',
+ Name.Variable: '#83a598',
+ Name.Constant: '#d3869b',
+ Name.Class: '#8ec07c',
+ Name.Function: '#8ec07c',
+ Name.Namespace: '#8ec07c',
+ Name.Exception: '#fb4934',
+ Name.Tag: '#8ec07c',
+ Name.Attribute: '#fabd2f',
+ Name.Decorator: '#fb4934',
+
+ Generic.Heading: 'bold #ebdbb2',
+ Generic.Subheading: 'underline #ebdbb2',
+ Generic.Deleted: 'bg:#fb4934 #282828',
+ Generic.Inserted: 'bg:#b8bb26 #282828',
+ Generic.Error: '#fb4934',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#a89984',
+ Generic.Output: '#f2e5bc',
+ Generic.Traceback: '#fb4934',
+
+ Error: 'bg:#fb4934 #282828'
+ }
+
+class GruvboxLightStyle(Style):
+ """
+ Pygments version of the "gruvbox" Light vim theme.
+ """
+
+ background_color = '#fbf1c7'
+ highlight_color = '#3c3836'
+
+ styles = {
+ Comment: 'italic #928374',
+ Comment.PreProc: '#427b58',
+ Comment.Special: 'bold italic #3c3836',
+
+ Keyword: '#9d0006',
+ Operator.Word: '#9d0006',
+
+ String: '#79740e',
+ String.Escape: '#af3a03',
+
+ Number: '#8f3f71',
+
+ Name.Builtin: '#af3a03',
+ Name.Variable: '#076678',
+ Name.Constant: '#8f3f71',
+ Name.Class: '#427b58',
+ Name.Function: '#427b58',
+ Name.Namespace: '#427b58',
+ Name.Exception: '#9d0006',
+ Name.Tag: '#427b58',
+ Name.Attribute: '#b57614',
+ Name.Decorator: '#9d0006',
+
+ Generic.Heading: 'bold #3c3836',
+ Generic.Subheading: 'underline #3c3836',
+ Generic.Deleted: 'bg:#9d0006 #fbf1c7',
+ Generic.Inserted: 'bg:#79740e #fbf1c7',
+ Generic.Error: '#9d0006',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#7c6f64',
+ Generic.Output: '#32302f',
+ Generic.Traceback: '#9d0006',
+
+ Error: 'bg:#9d0006 #fbf1c7'
+ }
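The gruvbox hunk above re-adds the module unchanged, so both variants remain importable from the vendored package. A minimal usage sketch, assuming the py3 tree is on sys.path as a normal `pygments` package; passing the class directly avoids depending on the registry name:

    # Sketch: highlight a snippet in the terminal using the restored dark variant.
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import Terminal256Formatter
    from pygments.styles.gruvbox import GruvboxDarkStyle

    source = 'def greet(name):\n    return f"hello {name}"\n'
    # Formatters accept either a registered style name or a Style subclass.
    print(highlight(source, PythonLexer(), Terminal256Formatter(style=GruvboxDarkStyle)))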
diff --git a/contrib/python/Pygments/py3/pygments/styles/igor.py b/contrib/python/Pygments/py3/pygments/styles/igor.py
index 278e9da1d4..88caf34f89 100644
--- a/contrib/python/Pygments/py3/pygments/styles/igor.py
+++ b/contrib/python/Pygments/py3/pygments/styles/igor.py
@@ -4,7 +4,7 @@
Igor Pro default style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/inkpot.py b/contrib/python/Pygments/py3/pygments/styles/inkpot.py
index d2eff2c8d5..571bcab75b 100644
--- a/contrib/python/Pygments/py3/pygments/styles/inkpot.py
+++ b/contrib/python/Pygments/py3/pygments/styles/inkpot.py
@@ -1,66 +1,66 @@
-"""
- pygments.styles.inkpot
- ~~~~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Text, Other, Keyword, Name, Comment, String, \
- Error, Number, Operator, Generic, Whitespace, Punctuation
-
-
-class InkPotStyle(Style):
- background_color = "#1e1e27"
- default_style = ""
- styles = {
- Text: "#cfbfad",
- Other: "#cfbfad",
- Whitespace: "#434357",
- Comment: "#cd8b00",
- Comment.Preproc: "#409090",
- Comment.PreprocFile: "bg:#404040 #ffcd8b",
- Comment.Special: "#808bed",
-
- Keyword: "#808bed",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "#ff8bff",
-
- Operator: "#666666",
-
- Punctuation: "#cfbfad",
-
- Name: "#cfbfad",
- Name.Attribute: "#cfbfad",
- Name.Builtin.Pseudo: '#ffff00',
- Name.Builtin: "#808bed",
- Name.Class: "#ff8bff",
- Name.Constant: "#409090",
- Name.Decorator: "#409090",
- Name.Exception: "#ff0000",
- Name.Function: "#c080d0",
- Name.Label: "#808bed",
- Name.Namespace: "#ff0000",
- Name.Variable: "#cfbfad",
-
- String: "bg:#404040 #ffcd8b",
- String.Doc: "#808bed",
-
- Number: "#f0ad6d",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "bg:#6e2e2e #ffffff"
- }
+"""
+ pygments.styles.inkpot
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Text, Other, Keyword, Name, Comment, String, \
+ Error, Number, Operator, Generic, Whitespace, Punctuation
+
+
+class InkPotStyle(Style):
+ background_color = "#1e1e27"
+ default_style = ""
+ styles = {
+ Text: "#cfbfad",
+ Other: "#cfbfad",
+ Whitespace: "#434357",
+ Comment: "#cd8b00",
+ Comment.Preproc: "#409090",
+ Comment.PreprocFile: "bg:#404040 #ffcd8b",
+ Comment.Special: "#808bed",
+
+ Keyword: "#808bed",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "#ff8bff",
+
+ Operator: "#666666",
+
+ Punctuation: "#cfbfad",
+
+ Name: "#cfbfad",
+ Name.Attribute: "#cfbfad",
+ Name.Builtin.Pseudo: '#ffff00',
+ Name.Builtin: "#808bed",
+ Name.Class: "#ff8bff",
+ Name.Constant: "#409090",
+ Name.Decorator: "#409090",
+ Name.Exception: "#ff0000",
+ Name.Function: "#c080d0",
+ Name.Label: "#808bed",
+ Name.Namespace: "#ff0000",
+ Name.Variable: "#cfbfad",
+
+ String: "bg:#404040 #ffcd8b",
+ String.Doc: "#808bed",
+
+ Number: "#f0ad6d",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "bg:#6e2e2e #ffffff"
+ }
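InkPotStyle only populates the class-level `styles` mapping; defaults and attribute resolution come from the Style base class. A short sketch of inspecting that resolution through the standard `style_for_token` classmethod, shown purely as an illustration:

    # Sketch: resolve one token into concrete attributes from the mapping above.
    from pygments.styles.inkpot import InkPotStyle
    from pygments.token import Comment

    resolved = InkPotStyle.style_for_token(Comment.Preproc)
    # `resolved` is a dict with keys such as 'color', 'bold', 'italic', 'bgcolor'.
    print(resolved['color'])  # expected '409090' per the hunk above (no leading '#')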
diff --git a/contrib/python/Pygments/py3/pygments/styles/lovelace.py b/contrib/python/Pygments/py3/pygments/styles/lovelace.py
index ec8d2a9106..f4f023b070 100644
--- a/contrib/python/Pygments/py3/pygments/styles/lovelace.py
+++ b/contrib/python/Pygments/py3/pygments/styles/lovelace.py
@@ -8,7 +8,7 @@
A desaturated, somewhat subdued style created for the Lovelace interactive
learning environment.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/manni.py b/contrib/python/Pygments/py3/pygments/styles/manni.py
index 167dcebd5a..b3f0ee3d2d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/manni.py
+++ b/contrib/python/Pygments/py3/pygments/styles/manni.py
@@ -7,7 +7,7 @@
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/material.py b/contrib/python/Pygments/py3/pygments/styles/material.py
index c7012fe516..84b14c3eca 100644
--- a/contrib/python/Pygments/py3/pygments/styles/material.py
+++ b/contrib/python/Pygments/py3/pygments/styles/material.py
@@ -1,118 +1,118 @@
-"""
- pygments.styles.material
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Mimic the Material theme color scheme.
-
- https://github.com/material-theme/vsc-material-theme
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Escape, \
- Error, Text, Number, Operator, Generic, Punctuation, Literal
-
-class MaterialStyle(Style):
- """
- This style mimics the Material Theme color scheme.
- """
- dark_teal = '#263238'
- white= '#FFFFFF'
- black= '#000000'
- red= '#FF5370'
- orange= '#F78C6C'
- yellow= '#FFCB6B'
- green= '#C3E88D'
- cyan= '#89DDFF'
- blue= '#82AAFF'
- paleblue= '#B2CCD6'
- purple= '#C792EA'
- brown= '#C17E70'
- pink= '#F07178'
- violet= '#BB80B3'
- foreground = '#EEFFFF'
- faded = '#546E7A'
-
- default_style = ""
- background_color = dark_teal
- highlight_color = '#2C3B41'
- line_number_color = '#37474F'
- line_number_background_color = dark_teal
- line_number_special_color = '#607A86'
- line_number_special_background_color = dark_teal
-
- styles = {
- Text: foreground,
- Escape: cyan,
- Error: red,
-
- Keyword: violet,
- Keyword.Constant: cyan,
- Keyword.Declaration: violet,
- Keyword.Namespace: 'italic ' + cyan,
- Keyword.Pseudo: cyan,
- Keyword.Type: violet,
-
- Name: foreground,
- Name.Attribute: violet,
- Name.Builtin: blue,
- Name.Builtin.Pseudo: cyan,
- Name.Class: yellow,
- Name.Constant: foreground,
- Name.Decorator: blue,
- Name.Entity: cyan,
- Name.Exception: yellow,
- Name.Function: blue,
- Name.Function.Magic: blue,
- Name.Label: blue,
- Name.Property: yellow,
- Name.Namespace: yellow,
- Name.Other: foreground,
- Name.Tag: red,
- Name.Variable: cyan,
- Name.Variable.Class: cyan,
- Name.Variable.Global: cyan,
- Name.Variable.Instance: cyan,
- Name.Variable.Magic: blue,
-
- Literal: green,
- Literal.Date: green,
-
- String: green,
- String.Affix: violet,
- String.Backtick: green,
- String.Char: green,
- String.Delimiter: foreground,
- String.Doc: 'italic ' + faded,
- String.Double: green,
- String.Escape: foreground,
- String.Heredoc: green,
- String.Interpol: cyan,
- String.Other: green,
- String.Regex: cyan,
- String.Single: green,
- String.Symbol: cyan,
-
- Number: orange,
-
- Operator: cyan,
- Operator.Word: 'italic ' + cyan,
-
- Punctuation: cyan,
-
- Comment: 'italic ' + faded,
-
- Generic: foreground,
- Generic.Deleted: red,
- Generic.Emph: cyan,
- Generic.Error: red,
- Generic.Heading: green,
- Generic.Inserted: green,
- Generic.Output: faded,
- Generic.Prompt: yellow,
- Generic.Strong: red,
- Generic.Subheading: cyan,
- Generic.Traceback: red,
- }
+"""
+ pygments.styles.material
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Mimic the Material theme color scheme.
+
+ https://github.com/material-theme/vsc-material-theme
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Escape, \
+ Error, Text, Number, Operator, Generic, Punctuation, Literal
+
+class MaterialStyle(Style):
+ """
+ This style mimics the Material Theme color scheme.
+ """
+ dark_teal = '#263238'
+ white= '#FFFFFF'
+ black= '#000000'
+ red= '#FF5370'
+ orange= '#F78C6C'
+ yellow= '#FFCB6B'
+ green= '#C3E88D'
+ cyan= '#89DDFF'
+ blue= '#82AAFF'
+ paleblue= '#B2CCD6'
+ purple= '#C792EA'
+ brown= '#C17E70'
+ pink= '#F07178'
+ violet= '#BB80B3'
+ foreground = '#EEFFFF'
+ faded = '#546E7A'
+
+ default_style = ""
+ background_color = dark_teal
+ highlight_color = '#2C3B41'
+ line_number_color = '#37474F'
+ line_number_background_color = dark_teal
+ line_number_special_color = '#607A86'
+ line_number_special_background_color = dark_teal
+
+ styles = {
+ Text: foreground,
+ Escape: cyan,
+ Error: red,
+
+ Keyword: violet,
+ Keyword.Constant: cyan,
+ Keyword.Declaration: violet,
+ Keyword.Namespace: 'italic ' + cyan,
+ Keyword.Pseudo: cyan,
+ Keyword.Type: violet,
+
+ Name: foreground,
+ Name.Attribute: violet,
+ Name.Builtin: blue,
+ Name.Builtin.Pseudo: cyan,
+ Name.Class: yellow,
+ Name.Constant: foreground,
+ Name.Decorator: blue,
+ Name.Entity: cyan,
+ Name.Exception: yellow,
+ Name.Function: blue,
+ Name.Function.Magic: blue,
+ Name.Label: blue,
+ Name.Property: yellow,
+ Name.Namespace: yellow,
+ Name.Other: foreground,
+ Name.Tag: red,
+ Name.Variable: cyan,
+ Name.Variable.Class: cyan,
+ Name.Variable.Global: cyan,
+ Name.Variable.Instance: cyan,
+ Name.Variable.Magic: blue,
+
+ Literal: green,
+ Literal.Date: green,
+
+ String: green,
+ String.Affix: violet,
+ String.Backtick: green,
+ String.Char: green,
+ String.Delimiter: foreground,
+ String.Doc: 'italic ' + faded,
+ String.Double: green,
+ String.Escape: foreground,
+ String.Heredoc: green,
+ String.Interpol: cyan,
+ String.Other: green,
+ String.Regex: cyan,
+ String.Single: green,
+ String.Symbol: cyan,
+
+ Number: orange,
+
+ Operator: cyan,
+ Operator.Word: 'italic ' + cyan,
+
+ Punctuation: cyan,
+
+ Comment: 'italic ' + faded,
+
+ Generic: foreground,
+ Generic.Deleted: red,
+ Generic.Emph: cyan,
+ Generic.Error: red,
+ Generic.Heading: green,
+ Generic.Inserted: green,
+ Generic.Output: faded,
+ Generic.Prompt: yellow,
+ Generic.Strong: red,
+ Generic.Subheading: cyan,
+ Generic.Traceback: red,
+ }
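MaterialStyle is a palette (the class-level color constants) plus a token mapping; formatters turn that into concrete output. A hedged sketch of emitting the corresponding CSS with the stock HtmlFormatter:

    # Sketch: dump the CSS that HtmlFormatter derives from MaterialStyle.
    from pygments.formatters import HtmlFormatter
    from pygments.styles.material import MaterialStyle

    formatter = HtmlFormatter(style=MaterialStyle)
    # get_style_defs() returns CSS rules scoped to the given selector.
    print(formatter.get_style_defs('.highlight'))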
diff --git a/contrib/python/Pygments/py3/pygments/styles/monokai.py b/contrib/python/Pygments/py3/pygments/styles/monokai.py
index 9de82f35ef..b114293df9 100644
--- a/contrib/python/Pygments/py3/pygments/styles/monokai.py
+++ b/contrib/python/Pygments/py3/pygments/styles/monokai.py
@@ -6,7 +6,7 @@
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -91,15 +91,15 @@ class MonokaiStyle(Style):
String.Single: "", # class: 's1'
String.Symbol: "", # class: 'ss'
-
+
Generic: "", # class: 'g'
Generic.Deleted: "#f92672", # class: 'gd',
Generic.Emph: "italic", # class: 'ge'
Generic.Error: "", # class: 'gr'
Generic.Heading: "", # class: 'gh'
Generic.Inserted: "#a6e22e", # class: 'gi'
- Generic.Output: "#66d9ef", # class: 'go'
- Generic.Prompt: "bold #f92672", # class: 'gp'
+ Generic.Output: "#66d9ef", # class: 'go'
+ Generic.Prompt: "bold #f92672", # class: 'gp'
Generic.Strong: "bold", # class: 'gs'
Generic.Subheading: "#75715e", # class: 'gu'
Generic.Traceback: "", # class: 'gt'
diff --git a/contrib/python/Pygments/py3/pygments/styles/murphy.py b/contrib/python/Pygments/py3/pygments/styles/murphy.py
index 9115c2d87f..0774f5e646 100644
--- a/contrib/python/Pygments/py3/pygments/styles/murphy.py
+++ b/contrib/python/Pygments/py3/pygments/styles/murphy.py
@@ -4,7 +4,7 @@
Murphy's style from CodeRay.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/native.py b/contrib/python/Pygments/py3/pygments/styles/native.py
index 9c2f2c16b8..08303351f1 100644
--- a/contrib/python/Pygments/py3/pygments/styles/native.py
+++ b/contrib/python/Pygments/py3/pygments/styles/native.py
@@ -4,7 +4,7 @@
pygments version of my "native" vim theme.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ class NativeStyle(Style):
background_color = '#202020'
highlight_color = '#404040'
- line_number_color = '#aaaaaa'
+ line_number_color = '#aaaaaa'
styles = {
Token: '#d0d0d0',
diff --git a/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py b/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
index 0c98f0058d..857c34ae49 100644
--- a/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
@@ -8,7 +8,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py b/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
index 4c9f1392ac..6ae2bf1819 100644
--- a/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
+++ b/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
@@ -8,7 +8,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/pastie.py b/contrib/python/Pygments/py3/pygments/styles/pastie.py
index 14ec7007f5..d2b0bd5f60 100644
--- a/contrib/python/Pygments/py3/pygments/styles/pastie.py
+++ b/contrib/python/Pygments/py3/pygments/styles/pastie.py
@@ -6,7 +6,7 @@
.. _pastie: http://pastie.caboo.se/
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/perldoc.py b/contrib/python/Pygments/py3/pygments/styles/perldoc.py
index 5eae210fd8..3edb0ab848 100644
--- a/contrib/python/Pygments/py3/pygments/styles/perldoc.py
+++ b/contrib/python/Pygments/py3/pygments/styles/perldoc.py
@@ -6,7 +6,7 @@
.. _perldoc: http://perldoc.perl.org/
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py b/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py
index 3996332a25..f8a7fd4370 100644
--- a/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py
+++ b/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py
@@ -6,7 +6,7 @@
.. _theme: http://sanssecours.github.io/Rainbow-Dash.tmbundle
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/rrt.py b/contrib/python/Pygments/py3/pygments/styles/rrt.py
index 0b58152d4c..39c1496382 100644
--- a/contrib/python/Pygments/py3/pygments/styles/rrt.py
+++ b/contrib/python/Pygments/py3/pygments/styles/rrt.py
@@ -4,7 +4,7 @@
pygments "rrt" theme, based on Zap and Emacs defaults.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/sas.py b/contrib/python/Pygments/py3/pygments/styles/sas.py
index ed5eb2666a..01cb531fb1 100644
--- a/contrib/python/Pygments/py3/pygments/styles/sas.py
+++ b/contrib/python/Pygments/py3/pygments/styles/sas.py
@@ -6,7 +6,7 @@
meant to be a complete style. It's merely meant to mimic SAS'
program editor syntax highlighting.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/solarized.py b/contrib/python/Pygments/py3/pygments/styles/solarized.py
index 1e051eb477..f54d5783da 100644
--- a/contrib/python/Pygments/py3/pygments/styles/solarized.py
+++ b/contrib/python/Pygments/py3/pygments/styles/solarized.py
@@ -7,7 +7,7 @@
A Pygments style for the Solarized themes (licensed under MIT).
See: https://github.com/altercation/solarized
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,15 +57,15 @@ def make_style(colors):
Number: colors['cyan'],
- Generic: colors['base0'],
+ Generic: colors['base0'],
Generic.Deleted: colors['red'],
Generic.Emph: 'italic',
Generic.Error: colors['red'],
Generic.Heading: 'bold',
Generic.Subheading: 'underline',
Generic.Inserted: colors['green'],
- Generic.Output: colors['base0'],
- Generic.Prompt: 'bold ' + colors['blue'],
+ Generic.Output: colors['base0'],
+ Generic.Prompt: 'bold ' + colors['blue'],
Generic.Strong: 'bold',
Generic.Traceback: colors['blue'],
@@ -120,8 +120,8 @@ class SolarizedDarkStyle(Style):
styles = make_style(DARK_COLORS)
background_color = DARK_COLORS['base03']
highlight_color = DARK_COLORS['base02']
- line_number_color = DARK_COLORS['base01']
- line_number_background_color = DARK_COLORS['base02']
+ line_number_color = DARK_COLORS['base01']
+ line_number_background_color = DARK_COLORS['base02']
class SolarizedLightStyle(SolarizedDarkStyle):
@@ -132,5 +132,5 @@ class SolarizedLightStyle(SolarizedDarkStyle):
styles = make_style(LIGHT_COLORS)
background_color = LIGHT_COLORS['base03']
highlight_color = LIGHT_COLORS['base02']
- line_number_color = LIGHT_COLORS['base01']
- line_number_background_color = LIGHT_COLORS['base02']
+ line_number_color = LIGHT_COLORS['base01']
+ line_number_background_color = LIGHT_COLORS['base02']
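The solarized module builds both variants from one `make_style(colors)` helper applied to two palettes, which is why only the palette-driven class attributes differ in the hunk above. A brief sketch, importing the classes directly rather than through the style registry:

    # Sketch: the two variants share the make_style() token mapping;
    # only the palette-derived attributes differ.
    from pygments.styles.solarized import SolarizedDarkStyle, SolarizedLightStyle

    print(SolarizedDarkStyle.background_color, SolarizedLightStyle.background_color)
    print(SolarizedDarkStyle.line_number_color, SolarizedLightStyle.line_number_color)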
diff --git a/contrib/python/Pygments/py3/pygments/styles/stata_dark.py b/contrib/python/Pygments/py3/pygments/styles/stata_dark.py
index 86d23be002..f4f5a569be 100644
--- a/contrib/python/Pygments/py3/pygments/styles/stata_dark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/stata_dark.py
@@ -6,7 +6,7 @@
meant to be a complete style, just for Stata's file formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/stata_light.py b/contrib/python/Pygments/py3/pygments/styles/stata_light.py
index 9dead703dc..93c2c998be 100644
--- a/contrib/python/Pygments/py3/pygments/styles/stata_light.py
+++ b/contrib/python/Pygments/py3/pygments/styles/stata_light.py
@@ -5,7 +5,7 @@
Light Style inspired by Stata's do-file editor. Note this is not
meant to be a complete style, just for Stata's file formats.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/tango.py b/contrib/python/Pygments/py3/pygments/styles/tango.py
index 7a2f005b6c..a7335b9448 100644
--- a/contrib/python/Pygments/py3/pygments/styles/tango.py
+++ b/contrib/python/Pygments/py3/pygments/styles/tango.py
@@ -32,7 +32,7 @@
have been chosen to have the same style. Similarly, keywords (Keyword.*),
and Operator.Word (and, or, in) have been assigned the same style.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/trac.py b/contrib/python/Pygments/py3/pygments/styles/trac.py
index b2a1fd4d2e..d46682a0c0 100644
--- a/contrib/python/Pygments/py3/pygments/styles/trac.py
+++ b/contrib/python/Pygments/py3/pygments/styles/trac.py
@@ -4,7 +4,7 @@
Port of the default trac highlighter design.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/vim.py b/contrib/python/Pygments/py3/pygments/styles/vim.py
index f498606bdb..f869c01ade 100644
--- a/contrib/python/Pygments/py3/pygments/styles/vim.py
+++ b/contrib/python/Pygments/py3/pygments/styles/vim.py
@@ -4,7 +4,7 @@
A highlighting style for Pygments, inspired by vim.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/vs.py b/contrib/python/Pygments/py3/pygments/styles/vs.py
index a504f912a2..420adccb2a 100644
--- a/contrib/python/Pygments/py3/pygments/styles/vs.py
+++ b/contrib/python/Pygments/py3/pygments/styles/vs.py
@@ -4,7 +4,7 @@
Simple style with MS Visual Studio colors.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/xcode.py b/contrib/python/Pygments/py3/pygments/styles/xcode.py
index fd8621e7d1..fe31b833d8 100644
--- a/contrib/python/Pygments/py3/pygments/styles/xcode.py
+++ b/contrib/python/Pygments/py3/pygments/styles/xcode.py
@@ -4,7 +4,7 @@
Style similar to the `Xcode` default theme.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/zenburn.py b/contrib/python/Pygments/py3/pygments/styles/zenburn.py
index 8797df7f25..d856ff9c6e 100644
--- a/contrib/python/Pygments/py3/pygments/styles/zenburn.py
+++ b/contrib/python/Pygments/py3/pygments/styles/zenburn.py
@@ -1,80 +1,80 @@
-"""
- pygments.styles.zenburn
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Low contrast color scheme Zenburn.
-
- See: https://kippura.org/zenburnpage/
- https://github.com/jnurmine/Zenburn
-
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import (
- Token, Name, Operator, Keyword, Generic, Comment, Number, String, Literal,
- Punctuation, Error,
-)
-
-
-class ZenburnStyle(Style):
- """
- Low contrast Zenburn style.
- """
-
- default_style = ""
- background_color = '#3f3f3f'
- highlight_color = '#484848'
- line_number_color = '#5d6262'
- line_number_background_color = '#353535'
- line_number_special_color = '#7a8080'
- line_number_special_background_color = '#353535'
- styles = {
- Token: '#dcdccc',
- Error: '#e37170 bold',
-
- Keyword: '#efdcbc',
- Keyword.Type: '#dfdfbf bold',
- Keyword.Constant: '#dca3a3',
- Keyword.Declaration: '#f0dfaf',
- Keyword.Namespace: '#f0dfaf',
-
- Name: '#dcdccc',
- Name.Tag: '#e89393 bold',
- Name.Entity: '#cfbfaf',
- Name.Constant: '#dca3a3',
- Name.Class: '#efef8f',
- Name.Function: '#efef8f',
- Name.Builtin: '#efef8f',
- Name.Builtin.Pseudo: '#dcdccc',
- Name.Attribute: '#efef8f',
- Name.Exception: '#c3bf9f bold',
-
- Literal: '#9fafaf',
-
- String: '#cc9393',
- String.Doc: '#7f9f7f',
- String.Interpol: '#dca3a3 bold',
-
- Number: '#8cd0d3',
- Number.Float: '#c0bed1',
-
- Operator: '#f0efd0',
-
- Punctuation: '#f0efd0',
-
- Comment: '#7f9f7f italic',
- Comment.Preproc: '#dfaf8f bold',
- Comment.PreprocFile: '#cc9393',
- Comment.Special: '#dfdfdf bold',
-
- Generic: '#ecbcbc bold',
- Generic.Emph: '#ffffff bold',
- Generic.Output: '#5b605e bold',
- Generic.Heading: '#efefef bold',
- Generic.Deleted: '#c3bf9f bg:#313c36',
- Generic.Inserted: '#709080 bg:#313c36 bold',
- Generic.Traceback: '#80d4aa bg:#2f2f2f bold',
- Generic.Subheading: '#efefef bold',
- }
+"""
+ pygments.styles.zenburn
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Low contrast color scheme Zenburn.
+
+ See: https://kippura.org/zenburnpage/
+ https://github.com/jnurmine/Zenburn
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import (
+ Token, Name, Operator, Keyword, Generic, Comment, Number, String, Literal,
+ Punctuation, Error,
+)
+
+
+class ZenburnStyle(Style):
+ """
+ Low contrast Zenburn style.
+ """
+
+ default_style = ""
+ background_color = '#3f3f3f'
+ highlight_color = '#484848'
+ line_number_color = '#5d6262'
+ line_number_background_color = '#353535'
+ line_number_special_color = '#7a8080'
+ line_number_special_background_color = '#353535'
+ styles = {
+ Token: '#dcdccc',
+ Error: '#e37170 bold',
+
+ Keyword: '#efdcbc',
+ Keyword.Type: '#dfdfbf bold',
+ Keyword.Constant: '#dca3a3',
+ Keyword.Declaration: '#f0dfaf',
+ Keyword.Namespace: '#f0dfaf',
+
+ Name: '#dcdccc',
+ Name.Tag: '#e89393 bold',
+ Name.Entity: '#cfbfaf',
+ Name.Constant: '#dca3a3',
+ Name.Class: '#efef8f',
+ Name.Function: '#efef8f',
+ Name.Builtin: '#efef8f',
+ Name.Builtin.Pseudo: '#dcdccc',
+ Name.Attribute: '#efef8f',
+ Name.Exception: '#c3bf9f bold',
+
+ Literal: '#9fafaf',
+
+ String: '#cc9393',
+ String.Doc: '#7f9f7f',
+ String.Interpol: '#dca3a3 bold',
+
+ Number: '#8cd0d3',
+ Number.Float: '#c0bed1',
+
+ Operator: '#f0efd0',
+
+ Punctuation: '#f0efd0',
+
+ Comment: '#7f9f7f italic',
+ Comment.Preproc: '#dfaf8f bold',
+ Comment.PreprocFile: '#cc9393',
+ Comment.Special: '#dfdfdf bold',
+
+ Generic: '#ecbcbc bold',
+ Generic.Emph: '#ffffff bold',
+ Generic.Output: '#5b605e bold',
+ Generic.Heading: '#efefef bold',
+ Generic.Deleted: '#c3bf9f bg:#313c36',
+ Generic.Inserted: '#709080 bg:#313c36 bold',
+ Generic.Traceback: '#80d4aa bg:#2f2f2f bold',
+ Generic.Subheading: '#efefef bold',
+ }
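ZenburnStyle, like the other restored style modules, ultimately reduces to a class-level `styles` dict mapping token types to style strings ('#rrggbb', 'bold', 'italic', 'bg:...'). A short sketch that walks that raw mapping exactly as defined in the hunk above:

    # Sketch: print the raw token -> style-string mapping on ZenburnStyle.
    from pygments.styles.zenburn import ZenburnStyle

    for token, spec in sorted(ZenburnStyle.styles.items(), key=str):
        print(f"{token}: {spec or '(inherit)'}")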
diff --git a/contrib/python/Pygments/py3/pygments/token.py b/contrib/python/Pygments/py3/pygments/token.py
index 9013acb709..be5baa8bfa 100644
--- a/contrib/python/Pygments/py3/pygments/token.py
+++ b/contrib/python/Pygments/py3/pygments/token.py
@@ -4,7 +4,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/unistring.py b/contrib/python/Pygments/py3/pygments/unistring.py
index 2872985c14..a8242afefa 100644
--- a/contrib/python/Pygments/py3/pygments/unistring.py
+++ b/contrib/python/Pygments/py3/pygments/unistring.py
@@ -7,87 +7,87 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-Cc = '\x00-\x1f\x7f-\x9f'
+Cc = '\x00-\x1f\x7f-\x9f'
-Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
+Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
-Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\u
fefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U00
01d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
+Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\u
fefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U00
01d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
-Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd'
+Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd'
-Cs = '\ud800-\udbff\\\udc00\udc01-\udfff'
+Cs = '\ud800-\udbff\\\udc00\udc01-\udfff'
-Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\
ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
+Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\
ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
-Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
+Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
-Lo = '\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00
010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
+Lo = '\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00
010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
+Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
-Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\u
a724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
+Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\u
a724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
-Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
+Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
-Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
+Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
-Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f
-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
+Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f
-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
-Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
+Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
-Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
+Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
-No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
+No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
-Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
+Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
-Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
+Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
-Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
+Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
-Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
+Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
-Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
+Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
-Po = "!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f"
+Po = "!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f"
-Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
+Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0'
+Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0'
-Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff'
+Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff'
-Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
+Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
-So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
+So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
-Zl = '\u2028'
+Zl = '\u2028'
-Zp = '\u2029'
+Zp = '\u2029'
-Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
+Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
-xid_continue = '0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1
fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U000113
44\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
+xid_continue = '0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1
fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U000113
44\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
-xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801
\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001
b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
+xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801
\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001
b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
# Generated from unidata 11.0.0
def combine(*args):
- return ''.join(globals()[cat] for cat in args)
+ return ''.join(globals()[cat] for cat in args)
def allexcept(*args):
newcats = cats[:]
for arg in args:
newcats.remove(arg)
- return ''.join(globals()[cat] for cat in newcats)
+ return ''.join(globals()[cat] for cat in newcats)
def _handle_runs(char_list): # pragma: no cover
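The category constants above exist to be spliced into regular-expression character classes, and combine()/allexcept() are the two ways lexers assemble them: combine() concatenates the named categories, allexcept() concatenates every category except the named ones. A usage sketch, not part of this patch (the pygments.unistring import path and the xid_* constants are the ones defined in this file; the sample identifiers are arbitrary):

    import re
    from pygments import unistring as uni

    # Identifier-shaped token built from the generated class strings.
    name_re = re.compile('[%s][%s]*' % (uni.xid_start, uni.xid_continue))
    assert name_re.match('переменная_1')       # non-ASCII identifiers match
    assert not name_re.match('1abc')           # a digit cannot start an identifier

    # combine() concatenates category strings; allexcept() is its complement.
    letters = uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl')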
@@ -104,13 +104,13 @@ def _handle_runs(char_list): # pragma: no cover
if a == b:
yield a
else:
- yield '%s-%s' % (a, b)
+ yield '%s-%s' % (a, b)
if __name__ == '__main__': # pragma: no cover
import unicodedata
- categories = {'xid_start': [], 'xid_continue': []}
+ categories = {'xid_start': [], 'xid_continue': []}
with open(__file__) as fp:
content = fp.read()
@@ -124,26 +124,26 @@ if __name__ == '__main__': # pragma: no cover
if ord(c) == 0xdc00:
# Hack to avoid combining this character with the preceding high
# surrogate, 0xdbff, when doing a repr.
- c = '\\' + c
+ c = '\\' + c
elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
# Escape regex metachars.
- c = '\\' + c
- categories.setdefault(cat, []).append(c)
+ c = '\\' + c
+ categories.setdefault(cat, []).append(c)
# XID_START and XID_CONTINUE are special categories used for matching
# identifiers in Python 3.
if c.isidentifier():
- categories['xid_start'].append(c)
+ categories['xid_start'].append(c)
if ('a' + c).isidentifier():
- categories['xid_continue'].append(c)
+ categories['xid_continue'].append(c)
with open(__file__, 'w') as fp:
fp.write(header)
- for cat in sorted(categories):
- val = ''.join(_handle_runs(categories[cat]))
- fp.write('%s = %a\n\n' % (cat, val))
+ for cat in sorted(categories):
+ val = ''.join(_handle_runs(categories[cat]))
+ fp.write('%s = %a\n\n' % (cat, val))
- cats = sorted(categories)
+ cats = sorted(categories)
cats.remove('xid_start')
cats.remove('xid_continue')
fp.write('cats = %r\n\n' % cats)
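Running this module as a script rewrites the file in place: the __main__ block walks every codepoint with unicodedata, buckets it by category (plus the xid_start/xid_continue identifier sets), collapses consecutive codepoints into ranges via _handle_runs, and writes the constants back after the file header. A minimal standalone sketch of that run-compression step; compress_runs is a hypothetical name, not the helper used above:

    def compress_runs(chars):
        # Collapse consecutive codepoints into 'a-c'-style ranges, same idea as _handle_runs.
        chars = sorted(set(chars))
        pieces, i = [], 0
        while i < len(chars):
            j = i
            while j + 1 < len(chars) and ord(chars[j + 1]) == ord(chars[j]) + 1:
                j += 1
            pieces.append(chars[i] if i == j else '%s-%s' % (chars[i], chars[j]))
            i = j + 1
        return ''.join(pieces)

    print(compress_runs(list('acdefgz')))   # prints 'ac-gz'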
diff --git a/contrib/python/Pygments/py3/pygments/util.py b/contrib/python/Pygments/py3/pygments/util.py
index 5d6ddc3f5b..9656a6f932 100644
--- a/contrib/python/Pygments/py3/pygments/util.py
+++ b/contrib/python/Pygments/py3/pygments/util.py
@@ -4,12 +4,12 @@
Utility functions.
- :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from io import TextIOWrapper
+from io import TextIOWrapper
split_path_re = re.compile(r'[/\\ ]')
@@ -51,7 +51,7 @@ def get_bool_opt(options, optname, default=None):
return string
elif isinstance(string, int):
return bool(string)
- elif not isinstance(string, str):
+ elif not isinstance(string, str):
raise OptionError('Invalid type %r for option %s; use '
'1/0, yes/no, true/false, on/off' % (
string, optname))
@@ -81,7 +81,7 @@ def get_int_opt(options, optname, default=None):
def get_list_opt(options, optname, default=None):
val = options.get(optname, default)
- if isinstance(val, str):
+ if isinstance(val, str):
return val.split()
elif isinstance(val, (list, tuple)):
return list(val)
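get_bool_opt(), get_int_opt() and get_list_opt() normalize option values that may arrive either as strings (for example from the command line) or as native Python types; the accepted boolean spellings are the ones listed in the error message above. A small usage sketch, not part of the patch:

    from pygments.util import get_bool_opt, get_int_opt, get_list_opt

    options = {'linenos': 'yes', 'tabsize': '8', 'filters': 'whitespace keywordcase'}
    print(get_bool_opt(options, 'linenos', False))   # True
    print(get_int_opt(options, 'tabsize', 4))        # 8
    print(get_list_opt(options, 'filters', []))      # ['whitespace', 'keywordcase']
    print(get_list_opt({'f': ['a', 'b']}, 'f'))      # ['a', 'b']  (lists and tuples pass through)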
@@ -171,10 +171,10 @@ def doctype_matches(text, regex):
Note that this method only checks the first part of a DOCTYPE.
eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
"""
- m = doctype_lookup_re.search(text)
+ m = doctype_lookup_re.search(text)
if m is None:
return False
- doctype = m.group(1)
+ doctype = m.group(1)
return re.compile(regex, re.I).match(doctype.strip()) is not None
@@ -194,7 +194,7 @@ def looks_like_xml(text):
try:
return _looks_like_xml_cache[key]
except KeyError:
- m = doctype_lookup_re.search(text)
+ m = doctype_lookup_re.search(text)
if m is not None:
return True
rv = tag_re.search(text[:1000]) is not None
@@ -202,10 +202,10 @@ def looks_like_xml(text):
return rv
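doctype_matches() and looks_like_xml() are the kind of helpers lexers use in their analyse_text() heuristics: the first extracts the DOCTYPE body and matches the caller's regex against it, the second reports whether a DOCTYPE or a tag pair shows up in the first part of the text, with a small cache in front. A sketch of the expected behaviour, not part of the patch:

    from pygments.util import doctype_matches, looks_like_xml

    xhtml = ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" '
             '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><html></html>')
    print(doctype_matches(xhtml, r'html.*'))       # True: regex runs against the doctype body
    print(looks_like_xml(xhtml))                   # True
    print(looks_like_xml('just some plain text'))  # False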
-def surrogatepair(c):
- """Given a unicode character code with length greater than 16 bits,
- return the corresponding pair of 16-bit surrogate code units.
- """
+def surrogatepair(c):
+ """Given a unicode character code with length greater than 16 bits,
+ return the corresponding pair of 16-bit surrogate code units.
+ """
# From example D28 of:
# http://www.unicode.org/book/ch03.pdf
return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
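The constants implement the standard UTF-16 split: 0xd7c0 is 0xd800 - (0x10000 >> 10), so the first element lands in the D800-DBFF lead-surrogate range and the low ten bits of the code land in DC00-DFFF. A quick check of the arithmetic, not part of the patch:

    c = 0x1F600                                    # GRINNING FACE, outside the BMP
    high, low = 0xd7c0 + (c >> 10), 0xdc00 + (c & 0x3ff)
    assert (high, low) == (0xD83D, 0xDE00)
    assert '\U0001F600'.encode('utf-16-be') == bytes.fromhex('d83dde00')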
@@ -246,7 +246,7 @@ def duplicates_removed(it, already_seen=()):
return lst
-class Future:
+class Future:
"""Generic class to defer some work.
Handled specially in RegexLexerMeta, to support regex string construction at
@@ -302,7 +302,7 @@ def terminal_encoding(term):
return locale.getpreferredencoding()
-class UnclosingTextIOWrapper(TextIOWrapper):
- # Don't close underlying buffer on destruction.
- def close(self):
- self.flush()
+class UnclosingTextIOWrapper(TextIOWrapper):
+ # Don't close underlying buffer on destruction.
+ def close(self):
+ self.flush()
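UnclosingTextIOWrapper lets callers put a text encoding on top of a binary stream (typically sys.stdout.buffer), hand it to code that may close it, and still keep the underlying stream usable, since close() only flushes. A usage sketch, not part of the patch:

    import sys
    from pygments.util import UnclosingTextIOWrapper

    out = UnclosingTextIOWrapper(sys.stdout.buffer, encoding='utf-8')
    out.write('highlighted output\n')
    out.close()                                      # flushes; the buffer stays open
    sys.stdout.buffer.write(b'stream still usable\n')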
diff --git a/contrib/python/Pygments/py3/ya.make b/contrib/python/Pygments/py3/ya.make
index 3fab931499..b0d6775d2e 100644
--- a/contrib/python/Pygments/py3/ya.make
+++ b/contrib/python/Pygments/py3/ya.make
@@ -1,15 +1,15 @@
-# Generated by devtools/yamaker (pypi).
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
-PY3_LIBRARY()
-
-OWNER(blinkov g:python-contrib)
+OWNER(blinkov g:python-contrib)
VERSION(2.11.2)
-LICENSE(BSD-3-Clause)
-
-NO_LINT()
+LICENSE(BSD-3-Clause)
+NO_LINT()
+
NO_CHECK_IMPORTS(
pygments.sphinxext
)
@@ -17,7 +17,7 @@ NO_CHECK_IMPORTS(
PY_SRCS(
TOP_LEVEL
pygments/__init__.py
- pygments/__main__.py
+ pygments/__main__.py
pygments/cmdline.py
pygments/console.py
pygments/filter.py
@@ -32,7 +32,7 @@ PY_SRCS(
pygments/formatters/irc.py
pygments/formatters/latex.py
pygments/formatters/other.py
- pygments/formatters/pangomarkup.py
+ pygments/formatters/pangomarkup.py
pygments/formatters/rtf.py
pygments/formatters/svg.py
pygments/formatters/terminal.py
@@ -43,13 +43,13 @@ PY_SRCS(
pygments/lexers/_cl_builtins.py
pygments/lexers/_cocoa_builtins.py
pygments/lexers/_csound_builtins.py
- pygments/lexers/_julia_builtins.py
+ pygments/lexers/_julia_builtins.py
pygments/lexers/_lasso_builtins.py
pygments/lexers/_lilypond_builtins.py
pygments/lexers/_lua_builtins.py
pygments/lexers/_mapping.py
pygments/lexers/_mql_builtins.py
- pygments/lexers/_mysql_builtins.py
+ pygments/lexers/_mysql_builtins.py
pygments/lexers/_openedge_builtins.py
pygments/lexers/_php_builtins.py
pygments/lexers/_postgres_builtins.py
@@ -58,23 +58,23 @@ PY_SRCS(
pygments/lexers/_stan_builtins.py
pygments/lexers/_stata_builtins.py
pygments/lexers/_tsql_builtins.py
- pygments/lexers/_usd_builtins.py
+ pygments/lexers/_usd_builtins.py
pygments/lexers/_vbscript_builtins.py
pygments/lexers/_vim_builtins.py
pygments/lexers/actionscript.py
pygments/lexers/agile.py
pygments/lexers/algebra.py
pygments/lexers/ambient.py
- pygments/lexers/amdgpu.py
+ pygments/lexers/amdgpu.py
pygments/lexers/ampl.py
- pygments/lexers/apdlexer.py
+ pygments/lexers/apdlexer.py
pygments/lexers/apl.py
pygments/lexers/archetype.py
- pygments/lexers/arrow.py
- pygments/lexers/asc.py
+ pygments/lexers/arrow.py
+ pygments/lexers/asc.py
pygments/lexers/asm.py
pygments/lexers/automation.py
- pygments/lexers/bare.py
+ pygments/lexers/bare.py
pygments/lexers/basic.py
pygments/lexers/bdd.py
pygments/lexers/bibtex.py
@@ -83,7 +83,7 @@ PY_SRCS(
pygments/lexers/c_cpp.py
pygments/lexers/c_like.py
pygments/lexers/capnproto.py
- pygments/lexers/cddl.py
+ pygments/lexers/cddl.py
pygments/lexers/chapel.py
pygments/lexers/clean.py
pygments/lexers/compiled.py
@@ -95,7 +95,7 @@ PY_SRCS(
pygments/lexers/d.py
pygments/lexers/dalvik.py
pygments/lexers/data.py
- pygments/lexers/devicetree.py
+ pygments/lexers/devicetree.py
pygments/lexers/diff.py
pygments/lexers/dotnet.py
pygments/lexers/dsls.py
@@ -104,7 +104,7 @@ PY_SRCS(
pygments/lexers/eiffel.py
pygments/lexers/elm.py
pygments/lexers/elpi.py
- pygments/lexers/email.py
+ pygments/lexers/email.py
pygments/lexers/erlang.py
pygments/lexers/esoteric.py
pygments/lexers/ezhil.py
@@ -117,15 +117,15 @@ PY_SRCS(
pygments/lexers/foxpro.py
pygments/lexers/freefem.py
pygments/lexers/functional.py
- pygments/lexers/futhark.py
- pygments/lexers/gcodelexer.py
- pygments/lexers/gdscript.py
+ pygments/lexers/futhark.py
+ pygments/lexers/gcodelexer.py
+ pygments/lexers/gdscript.py
pygments/lexers/go.py
pygments/lexers/grammar_notation.py
pygments/lexers/graph.py
pygments/lexers/graphics.py
- pygments/lexers/graphviz.py
- pygments/lexers/gsql.py
+ pygments/lexers/graphviz.py
+ pygments/lexers/gsql.py
pygments/lexers/haskell.py
pygments/lexers/haxe.py
pygments/lexers/hdl.py
@@ -139,10 +139,10 @@ PY_SRCS(
pygments/lexers/iolang.py
pygments/lexers/j.py
pygments/lexers/javascript.py
- pygments/lexers/jslt.py
+ pygments/lexers/jslt.py
pygments/lexers/julia.py
pygments/lexers/jvm.py
- pygments/lexers/kuin.py
+ pygments/lexers/kuin.py
pygments/lexers/lilypond.py
pygments/lexers/lisp.py
pygments/lexers/make.py
@@ -150,13 +150,13 @@ PY_SRCS(
pygments/lexers/math.py
pygments/lexers/matlab.py
pygments/lexers/maxima.py
- pygments/lexers/meson.py
- pygments/lexers/mime.py
+ pygments/lexers/meson.py
+ pygments/lexers/mime.py
pygments/lexers/ml.py
pygments/lexers/modeling.py
pygments/lexers/modula2.py
pygments/lexers/monte.py
- pygments/lexers/mosel.py
+ pygments/lexers/mosel.py
pygments/lexers/ncl.py
pygments/lexers/nimrod.py
pygments/lexers/nit.py
@@ -171,19 +171,19 @@ PY_SRCS(
pygments/lexers/pawn.py
pygments/lexers/perl.py
pygments/lexers/php.py
- pygments/lexers/pointless.py
+ pygments/lexers/pointless.py
pygments/lexers/pony.py
pygments/lexers/praat.py
- pygments/lexers/procfile.py
+ pygments/lexers/procfile.py
pygments/lexers/prolog.py
- pygments/lexers/promql.py
+ pygments/lexers/promql.py
pygments/lexers/python.py
pygments/lexers/qvt.py
pygments/lexers/r.py
pygments/lexers/rdf.py
pygments/lexers/rebol.py
pygments/lexers/resource.py
- pygments/lexers/ride.py
+ pygments/lexers/ride.py
pygments/lexers/rita.py
pygments/lexers/rnc.py
pygments/lexers/roboconf.py
@@ -192,17 +192,17 @@ PY_SRCS(
pygments/lexers/rust.py
pygments/lexers/sas.py
pygments/lexers/savi.py
- pygments/lexers/scdoc.py
+ pygments/lexers/scdoc.py
pygments/lexers/scripting.py
pygments/lexers/sgf.py
pygments/lexers/shell.py
- pygments/lexers/sieve.py
+ pygments/lexers/sieve.py
pygments/lexers/slash.py
pygments/lexers/smalltalk.py
- pygments/lexers/smithy.py
+ pygments/lexers/smithy.py
pygments/lexers/smv.py
pygments/lexers/snobol.py
- pygments/lexers/solidity.py
+ pygments/lexers/solidity.py
pygments/lexers/sophia.py
pygments/lexers/special.py
pygments/lexers/spice.py
@@ -211,7 +211,7 @@ PY_SRCS(
pygments/lexers/stata.py
pygments/lexers/supercollider.py
pygments/lexers/tcl.py
- pygments/lexers/teal.py
+ pygments/lexers/teal.py
pygments/lexers/templates.py
pygments/lexers/teraterm.py
pygments/lexers/testing.py
@@ -219,24 +219,24 @@ PY_SRCS(
pygments/lexers/textedit.py
pygments/lexers/textfmts.py
pygments/lexers/theorem.py
- pygments/lexers/thingsdb.py
- pygments/lexers/tnt.py
+ pygments/lexers/thingsdb.py
+ pygments/lexers/tnt.py
pygments/lexers/trafficscript.py
pygments/lexers/typoscript.py
pygments/lexers/unicon.py
pygments/lexers/urbi.py
- pygments/lexers/usd.py
+ pygments/lexers/usd.py
pygments/lexers/varnish.py
pygments/lexers/verification.py
pygments/lexers/web.py
- pygments/lexers/webassembly.py
- pygments/lexers/webidl.py
+ pygments/lexers/webassembly.py
+ pygments/lexers/webidl.py
pygments/lexers/webmisc.py
pygments/lexers/whiley.py
pygments/lexers/x10.py
pygments/lexers/xorg.py
- pygments/lexers/yang.py
- pygments/lexers/zig.py
+ pygments/lexers/yang.py
+ pygments/lexers/zig.py
pygments/modeline.py
pygments/plugin.py
pygments/regexopt.py
@@ -258,13 +258,13 @@ PY_SRCS(
pygments/styles/friendly.py
pygments/styles/friendly_grayscale.py
pygments/styles/fruity.py
- pygments/styles/gruvbox.py
+ pygments/styles/gruvbox.py
pygments/styles/igor.py
- pygments/styles/inkpot.py
+ pygments/styles/inkpot.py
pygments/styles/lilypond.py
pygments/styles/lovelace.py
pygments/styles/manni.py
- pygments/styles/material.py
+ pygments/styles/material.py
pygments/styles/monokai.py
pygments/styles/murphy.py
pygments/styles/native.py
@@ -284,14 +284,14 @@ PY_SRCS(
pygments/styles/vim.py
pygments/styles/vs.py
pygments/styles/xcode.py
- pygments/styles/zenburn.py
+ pygments/styles/zenburn.py
pygments/token.py
pygments/unistring.py
pygments/util.py
)
RESOURCE_FILES(
- PREFIX contrib/python/Pygments/py3/
+ PREFIX contrib/python/Pygments/py3/
.dist-info/METADATA
.dist-info/entry_points.txt
.dist-info/top_level.txt
diff --git a/contrib/python/Pygments/ya.make b/contrib/python/Pygments/ya.make
index 9aec5a65a8..009956d6a1 100644
--- a/contrib/python/Pygments/ya.make
+++ b/contrib/python/Pygments/ya.make
@@ -2,19 +2,19 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
-OWNER(g:python-contrib)
+OWNER(g:python-contrib)
-IF (PYTHON2)
- PEERDIR(contrib/python/Pygments/py2)
-ELSE()
- PEERDIR(contrib/python/Pygments/py3)
-ENDIF()
+IF (PYTHON2)
+ PEERDIR(contrib/python/Pygments/py2)
+ELSE()
+ PEERDIR(contrib/python/Pygments/py3)
+ENDIF()
NO_LINT()
-END()
+END()
-RECURSE(
- py2
- py3
+RECURSE(
+ py2
+ py3
)